Pylint - Enable more rules on source and tests-directory (#4929)

This commit is contained in:
Bert Blommers 2022-03-11 20:28:45 -01:00 committed by GitHub
parent e03ec432d0
commit eed32a5f72
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
176 changed files with 795 additions and 1228 deletions

View File

@ -17,7 +17,7 @@ init:
lint:
@echo "Running flake8..."
flake8 moto
flake8 moto tests
@echo "Running black... "
@echo "(Make sure you have black-22.1.0 installed, as other versions will produce different results)"
black --check moto/ tests/

View File

@ -535,11 +535,13 @@ class AWSCertificateManagerBackend(BaseBackend):
def request_certificate(
self,
domain_name,
domain_validation_options,
idempotency_token,
subject_alt_names,
tags=None,
):
"""
The parameter DomainValidationOptions has not yet been implemented
"""
if idempotency_token is not None:
arn = self._get_arn_from_idempotency_token(idempotency_token)
if arn and self._certificates[arn].tags.equals(tags):

View File

@ -23,8 +23,8 @@ class AWSCertificateManagerResponse(BaseResponse):
except ValueError:
return {}
def _get_param(self, param, default=None):
return self.request_params.get(param, default)
def _get_param(self, param_name, if_none=None):
return self.request_params.get(param_name, if_none)
def add_tags_to_certificate(self):
arn = self._get_param("CertificateArn")
@ -205,9 +205,6 @@ class AWSCertificateManagerResponse(BaseResponse):
def request_certificate(self):
domain_name = self._get_param("DomainName")
domain_validation_options = self._get_param(
"DomainValidationOptions"
) # is ignored atm
idempotency_token = self._get_param("IdempotencyToken")
subject_alt_names = self._get_param("SubjectAlternativeNames")
tags = self._get_param("Tags") # Optional
@ -225,7 +222,6 @@ class AWSCertificateManagerResponse(BaseResponse):
try:
arn = self.acm_backend.request_certificate(
domain_name,
domain_validation_options,
idempotency_token,
subject_alt_names,
tags,

View File

@ -85,7 +85,7 @@ class NoIntegrationDefined(NotFoundException):
class NoIntegrationResponseDefined(NotFoundException):
code = 404
def __init__(self, code=None):
def __init__(self):
super().__init__("Invalid Response status code specified")

View File

@ -0,0 +1,7 @@
import abc
class IntegrationParser(abc.ABC):
    """Abstract base class for APIGateway integration-type parsers.

    Concrete subclasses (e.g. parsers for the AWS, HTTP and unknown
    integration types) must implement ``invoke``.

    Fix: the original class used ``@abc.abstractmethod`` without
    inheriting from ``abc.ABC`` — without the ``ABCMeta`` metaclass the
    decorator is inert, so the base class was silently instantiable and
    the abstract contract was never enforced.
    """

    @abc.abstractmethod
    def invoke(self, request, integration):
        """Execute the given integration for *request* and return the response.

        :param request: the incoming HTTP request being dispatched.
        :param integration: the APIGateway integration definition to apply.
        """
        raise NotImplementedError

View File

@ -1,7 +1,9 @@
import requests
from . import IntegrationParser
class TypeAwsParser:
class TypeAwsParser(IntegrationParser):
def invoke(self, request, integration):
# integration.uri = arn:aws:apigateway:{region}:{subdomain.service|service}:path|action/{service_api}
# example value = 'arn:aws:apigateway:us-west-2:dynamodb:action/PutItem'

View File

@ -1,7 +1,9 @@
import requests
from . import IntegrationParser
class TypeHttpParser:
class TypeHttpParser(IntegrationParser):
"""
Parse invocations to a APIGateway resource with integration type HTTP
"""

View File

@ -1,4 +1,7 @@
class TypeUnknownParser:
from . import IntegrationParser
class TypeUnknownParser(IntegrationParser):
"""
Parse invocations to a APIGateway resource with an unknown integration type
"""

View File

@ -152,7 +152,7 @@ class Integration(BaseModel, dict):
def get_integration_response(self, status_code):
result = self.get("integrationResponses", {}).get(status_code)
if not result:
raise NoIntegrationResponseDefined(status_code)
raise NoIntegrationResponseDefined()
return result
def delete_integration_response(self, status_code):
@ -597,7 +597,7 @@ class ApiKey(BaseModel, dict):
name=None,
description=None,
enabled=False,
generateDistinctId=False,
generateDistinctId=False, # pylint: disable=unused-argument
value=None,
stageKeys=None,
tags=None,
@ -605,10 +605,8 @@ class ApiKey(BaseModel, dict):
):
super().__init__()
self["id"] = create_id()
self["value"] = (
value
if value
else "".join(random.sample(string.ascii_letters + string.digits, 40))
self["value"] = value or "".join(
random.sample(string.ascii_letters + string.digits, 40)
)
self["name"] = name
self["customerId"] = customerId
@ -846,8 +844,8 @@ class RestAPI(CloudFormationModel):
self.description = ""
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["RootResourceId"]
def has_cfn_attr(cls, attr):
return attr in ["RootResourceId"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

View File

@ -996,16 +996,13 @@ class ApiGatewayV2Backend(BaseBackend):
self,
api_key_selection_expression,
cors_configuration,
credentials_arn,
description,
disable_schema_validation,
disable_execute_api_endpoint,
name,
protocol_type,
route_key,
route_selection_expression,
tags,
target,
version,
):
"""

View File

@ -206,16 +206,13 @@ class ApiGatewayV2Response(BaseResponse):
api_key_selection_expression = params.get("apiKeySelectionExpression")
cors_configuration = params.get("corsConfiguration")
credentials_arn = params.get("credentialsArn")
description = params.get("description")
disable_schema_validation = params.get("disableSchemaValidation")
disable_execute_api_endpoint = params.get("disableExecuteApiEndpoint")
name = params.get("name")
protocol_type = params.get("protocolType")
route_key = params.get("routeKey")
route_selection_expression = params.get("routeSelectionExpression")
tags = params.get("tags")
target = params.get("target")
version = params.get("version")
if protocol_type not in ["HTTP", "WEBSOCKET"]:
@ -224,16 +221,13 @@ class ApiGatewayV2Response(BaseResponse):
api = self.apigatewayv2_backend.create_api(
api_key_selection_expression=api_key_selection_expression,
cors_configuration=cors_configuration,
credentials_arn=credentials_arn,
description=description,
disable_schema_validation=disable_schema_validation,
disable_execute_api_endpoint=disable_execute_api_endpoint,
name=name,
protocol_type=protocol_type,
route_key=route_key,
route_selection_expression=route_selection_expression,
tags=tags,
target=target,
version=version,
)
return 200, {}, json.dumps(api.to_json())

View File

@ -521,11 +521,8 @@ class FakeAutoScalingGroup(CloudFormationModel):
launch_config_name,
launch_template,
vpc_zone_identifier,
default_cooldown,
health_check_period,
health_check_type,
placement_group,
termination_policies,
new_instances_protected_from_scale_in=None,
):
self._set_azs_and_vpcs(availability_zones, vpc_zone_identifier, update=True)
@ -808,13 +805,13 @@ class AutoScalingBackend(BaseBackend):
launch_config_name,
launch_template,
vpc_zone_identifier,
default_cooldown,
health_check_period,
health_check_type,
placement_group,
termination_policies,
new_instances_protected_from_scale_in=None,
):
"""
The parameter DefaultCooldown, PlacementGroup, TerminationPolicies are not yet implemented
"""
# TODO: Add MixedInstancesPolicy once implemented.
# Verify only a single launch config-like parameter is provided.
if launch_config_name and launch_template:
@ -832,11 +829,8 @@ class AutoScalingBackend(BaseBackend):
launch_config_name=launch_config_name,
launch_template=launch_template,
vpc_zone_identifier=vpc_zone_identifier,
default_cooldown=default_cooldown,
health_check_period=health_check_period,
health_check_type=health_check_type,
placement_group=placement_group,
termination_policies=termination_policies,
new_instances_protected_from_scale_in=new_instances_protected_from_scale_in,
)
return group
@ -888,9 +882,10 @@ class AutoScalingBackend(BaseBackend):
self.update_attached_elbs(group.name)
self.update_attached_target_groups(group.name)
def set_instance_health(
self, instance_id, health_status, should_respect_grace_period
):
def set_instance_health(self, instance_id, health_status):
"""
The ShouldRespectGracePeriod-parameter is not yet implemented
"""
instance = self.ec2_backend.get_instance(instance_id)
instance_state = next(
instance_state

View File

@ -114,10 +114,7 @@ class AutoScalingResponse(BaseResponse):
health_status = self._get_param("HealthStatus")
if health_status not in ["Healthy", "Unhealthy"]:
raise ValueError("Valid instance health states are: [Healthy, Unhealthy]")
should_respect_grace_period = self._get_param("ShouldRespectGracePeriod")
self.autoscaling_backend.set_instance_health(
instance_id, health_status, should_respect_grace_period
)
self.autoscaling_backend.set_instance_health(instance_id, health_status)
template = self.response_template(SET_INSTANCE_HEALTH_TEMPLATE)
return template.render()
@ -200,11 +197,8 @@ class AutoScalingResponse(BaseResponse):
launch_config_name=self._get_param("LaunchConfigurationName"),
launch_template=self._get_dict_param("LaunchTemplate."),
vpc_zone_identifier=self._get_param("VPCZoneIdentifier"),
default_cooldown=self._get_int_param("DefaultCooldown"),
health_check_period=self._get_int_param("HealthCheckGracePeriod"),
health_check_type=self._get_param("HealthCheckType"),
placement_group=self._get_param("PlacementGroup"),
termination_policies=self._get_multi_param("TerminationPolicies.member"),
new_instances_protected_from_scale_in=self._get_bool_param(
"NewInstancesProtectedFromScaleIn", None
),

View File

@ -312,7 +312,7 @@ class Layer(object):
class LambdaFunction(CloudFormationModel, DockerModel):
def __init__(self, spec, region, validate_s3=True, version=1):
def __init__(self, spec, region, version=1):
DockerModel.__init__(self)
# required
self.region = region
@ -547,14 +547,12 @@ class LambdaFunction(CloudFormationModel, DockerModel):
except Exception:
return s
def _invoke_lambda(self, code, event=None, context=None):
def _invoke_lambda(self, event=None):
# Create the LogGroup if necessary, to write the result to
self.logs_backend.ensure_log_group(self.logs_group_name, [])
# TODO: context not yet implemented
if event is None:
event = dict()
if context is None:
context = {}
output = None
try:
@ -670,7 +668,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
for line in output.splitlines()
]
self.logs_backend.put_log_events(
self.logs_group_name, log_stream_name, log_events, None
self.logs_group_name, log_stream_name, log_events
)
def invoke(self, body, request_headers, response_headers):
@ -681,7 +679,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
body = "{}"
# Get the invocation type:
res, errored, logs = self._invoke_lambda(code=self.code, event=body)
res, errored, logs = self._invoke_lambda(event=body)
inv_type = request_headers.get("x-amz-invocation-type", "RequestResponse")
if inv_type == "RequestResponse":
encoded = base64.b64encode(logs.encode("utf-8"))
@ -746,8 +744,8 @@ class LambdaFunction(CloudFormationModel, DockerModel):
return fn
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -758,7 +756,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
@classmethod
def update_from_cloudformation_json(
cls, new_resource_name, cloudformation_json, original_resource, region_name
cls, original_resource, new_resource_name, cloudformation_json, region_name
):
updated_props = cloudformation_json["Properties"]
original_resource.update_configuration(updated_props)
@ -879,7 +877,7 @@ class EventSourceMapping(CloudFormationModel):
@classmethod
def update_from_cloudformation_json(
cls, new_resource_name, cloudformation_json, original_resource, region_name
cls, original_resource, new_resource_name, cloudformation_json, region_name
):
properties = cloudformation_json["Properties"]
event_source_uuid = original_resource.uuid

View File

@ -1,10 +1,7 @@
import json
import sys
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from urllib.parse import unquote
from functools import wraps
from moto.core.utils import amz_crc32, amzn_request_id, path_url
@ -46,9 +43,9 @@ class LambdaResponse(BaseResponse):
def root(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._list_functions(request, full_url, headers)
return self._list_functions()
elif request.method == "POST":
return self._create_function(request, full_url, headers)
return self._create_function()
else:
raise ValueError("Cannot handle request")
@ -60,7 +57,7 @@ class LambdaResponse(BaseResponse):
function_name = querystring.get("FunctionName", [None])[0]
return self._list_event_source_mappings(event_source_arn, function_name)
elif request.method == "POST":
return self._create_event_source_mapping(request, full_url, headers)
return self._create_event_source_mapping()
else:
raise ValueError("Cannot handle request")
@ -80,21 +77,21 @@ class LambdaResponse(BaseResponse):
def list_layers(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._list_layers(request, headers)
return self._list_layers()
def layers_versions(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._get_layer_versions(request, full_url, headers)
return self._get_layer_versions()
if request.method == "POST":
return self._publish_layer_version(request, full_url, headers)
return self._publish_layer_version()
def function(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._get_function(request, full_url, headers)
return self._get_function()
elif request.method == "DELETE":
return self._delete_function(request, full_url, headers)
return self._delete_function()
else:
raise ValueError("Cannot handle request")
@ -108,7 +105,7 @@ class LambdaResponse(BaseResponse):
return self._list_versions_by_function(function_name)
elif request.method == "POST":
return self._publish_function(request, full_url, headers)
return self._publish_function()
else:
raise ValueError("Cannot handle request")
@ -117,7 +114,7 @@ class LambdaResponse(BaseResponse):
def invoke(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "POST":
return self._invoke(request, full_url)
return self._invoke(request)
else:
raise ValueError("Cannot handle request")
@ -126,18 +123,18 @@ class LambdaResponse(BaseResponse):
def invoke_async(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "POST":
return self._invoke_async(request, full_url)
return self._invoke_async()
else:
raise ValueError("Cannot handle request")
def tag(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._list_tags(request, full_url)
return self._list_tags()
elif request.method == "POST":
return self._tag_resource(request, full_url)
return self._tag_resource()
elif request.method == "DELETE":
return self._untag_resource(request, full_url)
return self._untag_resource()
else:
raise ValueError("Cannot handle {0} request".format(request.method))
@ -145,20 +142,20 @@ class LambdaResponse(BaseResponse):
def policy(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "GET":
return self._get_policy(request, full_url, headers)
return self._get_policy(request)
elif request.method == "POST":
return self._add_policy(request, full_url, headers)
return self._add_policy(request)
elif request.method == "DELETE":
return self._del_policy(request, full_url, headers, self.querystring)
return self._del_policy(request, self.querystring)
else:
raise ValueError("Cannot handle {0} request".format(request.method))
def configuration(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == "PUT":
return self._put_configuration(request)
return self._put_configuration()
if request.method == "GET":
return self._get_function_configuration(request, full_url, headers)
return self._get_function_configuration()
else:
raise ValueError("Cannot handle request")
@ -179,15 +176,15 @@ class LambdaResponse(BaseResponse):
self.setup_class(request, full_url, headers)
if http_method == "GET":
return self._get_function_concurrency(request)
return self._get_function_concurrency()
elif http_method == "DELETE":
return self._delete_function_concurrency(request)
return self._delete_function_concurrency()
elif http_method == "PUT":
return self._put_function_concurrency(request)
return self._put_function_concurrency()
else:
raise ValueError("Cannot handle request")
def _add_policy(self, request, full_url, headers):
def _add_policy(self, request):
path = request.path if hasattr(request, "path") else path_url(request.url)
function_name = unquote(path.split("/")[-2])
if self.lambda_backend.get_function(function_name):
@ -197,7 +194,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _get_policy(self, request, full_url, headers):
def _get_policy(self, request):
path = request.path if hasattr(request, "path") else path_url(request.url)
function_name = unquote(path.split("/")[-2])
if self.lambda_backend.get_function(function_name):
@ -206,7 +203,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _del_policy(self, request, full_url, headers, querystring):
def _del_policy(self, request, querystring):
path = request.path if hasattr(request, "path") else path_url(request.url)
function_name = unquote(path.split("/")[-3])
statement_id = path.split("/")[-1].split("?")[0]
@ -217,7 +214,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _invoke(self, request, full_url):
def _invoke(self, request):
response_headers = {}
# URL Decode in case it's a ARN:
@ -251,7 +248,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, response_headers, "{}"
def _invoke_async(self, request, full_url):
def _invoke_async(self):
response_headers = {}
function_name = unquote(self.path.rsplit("/", 3)[-3])
@ -264,7 +261,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, response_headers, "{}"
def _list_functions(self, request, full_url, headers):
def _list_functions(self):
querystring = self.querystring
func_version = querystring.get("FunctionVersion", [None])[0]
result = {"Functions": []}
@ -286,12 +283,12 @@ class LambdaResponse(BaseResponse):
return 200, {}, json.dumps(result)
def _create_function(self, request, full_url, headers):
def _create_function(self):
fn = self.lambda_backend.create_function(self.json_body)
config = fn.get_configuration()
return 201, {}, json.dumps(config)
def _create_event_source_mapping(self, request, full_url, headers):
def _create_event_source_mapping(self):
fn = self.lambda_backend.create_event_source_mapping(self.json_body)
config = fn.get_configuration()
return 201, {}, json.dumps(config)
@ -326,7 +323,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _publish_function(self, request, full_url, headers):
def _publish_function(self):
function_name = self.path.rsplit("/", 2)[-2]
description = self._get_param("Description")
@ -337,7 +334,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _delete_function(self, request, full_url, headers):
def _delete_function(self):
function_name = unquote(self.path.rsplit("/", 1)[-1])
qualifier = self._get_param("Qualifier", None)
@ -354,7 +351,7 @@ class LambdaResponse(BaseResponse):
configuration["FunctionArn"] += ":$LATEST"
return configuration
def _get_function(self, request, full_url, headers):
def _get_function(self):
function_name = unquote(self.path.rsplit("/", 1)[-1])
qualifier = self._get_param("Qualifier", None)
@ -369,7 +366,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {"x-amzn-ErrorType": "ResourceNotFoundException"}, "{}"
def _get_function_configuration(self, request, full_url, headers):
def _get_function_configuration(self):
function_name = unquote(self.path.rsplit("/", 2)[-2])
qualifier = self._get_param("Qualifier", None)
@ -390,7 +387,7 @@ class LambdaResponse(BaseResponse):
else:
return self.default_region
def _list_tags(self, request, full_url):
def _list_tags(self):
function_arn = unquote(self.path.rsplit("/", 1)[-1])
fn = self.lambda_backend.get_function_by_arn(function_arn)
@ -399,7 +396,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _tag_resource(self, request, full_url):
def _tag_resource(self):
function_arn = unquote(self.path.rsplit("/", 1)[-1])
if self.lambda_backend.tag_resource(function_arn, self.json_body["Tags"]):
@ -407,7 +404,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _untag_resource(self, request, full_url):
def _untag_resource(self):
function_arn = unquote(self.path.rsplit("/", 1)[-1])
tag_keys = self.querystring["tagKeys"]
@ -416,7 +413,7 @@ class LambdaResponse(BaseResponse):
else:
return 404, {}, "{}"
def _put_configuration(self, request):
def _put_configuration(self):
function_name = unquote(self.path.rsplit("/", 2)[-2])
qualifier = self._get_param("Qualifier", None)
resp = self.lambda_backend.update_function_configuration(
@ -445,7 +442,7 @@ class LambdaResponse(BaseResponse):
resp = self.lambda_backend.get_code_signing_config(function_name)
return 200, {}, json.dumps(resp)
def _get_function_concurrency(self, request):
def _get_function_concurrency(self):
path_function_name = unquote(self.path.rsplit("/", 2)[-2])
function_name = self.lambda_backend.get_function(path_function_name)
@ -455,7 +452,7 @@ class LambdaResponse(BaseResponse):
resp = self.lambda_backend.get_function_concurrency(path_function_name)
return 200, {}, json.dumps({"ReservedConcurrentExecutions": resp})
def _delete_function_concurrency(self, request):
def _delete_function_concurrency(self):
path_function_name = unquote(self.path.rsplit("/", 2)[-2])
function_name = self.lambda_backend.get_function(path_function_name)
@ -466,7 +463,7 @@ class LambdaResponse(BaseResponse):
return 204, {}, "{}"
def _put_function_concurrency(self, request):
def _put_function_concurrency(self):
path_function_name = unquote(self.path.rsplit("/", 2)[-2])
function = self.lambda_backend.get_function(path_function_name)
@ -480,11 +477,11 @@ class LambdaResponse(BaseResponse):
return 200, {}, json.dumps({"ReservedConcurrentExecutions": resp})
def _list_layers(self, request, headers):
def _list_layers(self):
layers = self.lambda_backend.list_layers()
return (200, {}, json.dumps({"Layers": layers}))
return 200, {}, json.dumps({"Layers": layers})
def _get_layer_versions(self, request, full_url, headers):
def _get_layer_versions(self):
layer_name = self.path.rsplit("/", 2)[-2]
layer_versions = self.lambda_backend.get_layer_versions(layer_name)
return (
@ -495,7 +492,7 @@ class LambdaResponse(BaseResponse):
),
)
def _publish_layer_version(self, request, full_url, headers):
def _publish_layer_version(self):
spec = self.json_body
if "LayerName" not in spec:
spec["LayerName"] = self.path.rsplit("/", 2)[-2]

View File

@ -703,7 +703,7 @@ class Job(threading.Thread, BaseModel, DockerModel):
self.log_stream_name = stream_name
self._log_backend.ensure_log_group(log_group, None)
self._log_backend.create_log_stream(log_group, stream_name)
self._log_backend.put_log_events(log_group, stream_name, logs, None)
self._log_backend.put_log_events(log_group, stream_name, logs)
result = container.wait() or {}
self.exit_code = result.get("StatusCode", 0)
@ -961,9 +961,10 @@ class BatchBackend(BaseBackend):
except KeyError:
return None
def describe_compute_environments(
self, environments=None, max_results=None, next_token=None
):
def describe_compute_environments(self, environments=None):
"""
Pagination is not yet implemented
"""
envs = set()
if environments is not None:
envs = set(environments)
@ -1329,7 +1330,10 @@ class BatchBackend(BaseBackend):
return queue_name, queue.arn
def describe_job_queues(self, job_queues=None, max_results=None, next_token=None):
def describe_job_queues(self, job_queues=None):
"""
Pagination is not yet implemented
"""
envs = set()
if job_queues is not None:
envs = set(job_queues)
@ -1465,13 +1469,11 @@ class BatchBackend(BaseBackend):
self._job_definitions[job_def.arn].deregister()
def describe_job_definitions(
self,
job_def_name=None,
job_def_list=None,
status=None,
max_results=None,
next_token=None,
self, job_def_name=None, job_def_list=None, status=None
):
"""
Pagination is not yet implemented
"""
jobs = []
# As a job name can reference multiple revisions, we get a list of them
@ -1499,14 +1501,13 @@ class BatchBackend(BaseBackend):
job_name,
job_def_id,
job_queue,
parameters=None,
retries=None,
depends_on=None,
container_overrides=None,
timeout=None,
):
# TODO parameters, retries (which is a dict raw from request), job dependencies and container overrides are ignored for now
"""
Parameters RetryStrategy and Parameters are not yet implemented.
"""
# Look for job definition
job_def = self.get_job_definition(job_def_id)
if job_def is None:
@ -1549,7 +1550,10 @@ class BatchBackend(BaseBackend):
return result
def list_jobs(self, job_queue, job_status=None, max_results=None, next_token=None):
def list_jobs(self, job_queue, job_status=None):
"""
Pagination is not yet implemented
"""
jobs = []
job_queue = self.get_job_queue(job_queue)

View File

@ -66,12 +66,8 @@ class BatchResponse(BaseResponse):
# DescribeComputeEnvironments
def describecomputeenvironments(self):
compute_environments = self._get_param("computeEnvironments")
max_results = self._get_param("maxResults") # Ignored, should be int
next_token = self._get_param("nextToken") # Ignored
envs = self.batch_backend.describe_compute_environments(
compute_environments, max_results=max_results, next_token=next_token
)
envs = self.batch_backend.describe_compute_environments(compute_environments)
result = {"computeEnvironments": envs}
return json.dumps(result)
@ -134,12 +130,8 @@ class BatchResponse(BaseResponse):
# DescribeJobQueues
def describejobqueues(self):
job_queues = self._get_param("jobQueues")
max_results = self._get_param("maxResults") # Ignored, should be int
next_token = self._get_param("nextToken") # Ignored
queues = self.batch_backend.describe_job_queues(
job_queues, max_results=max_results, next_token=next_token
)
queues = self.batch_backend.describe_job_queues(job_queues)
result = {"jobQueues": queues}
return json.dumps(result)
@ -219,12 +211,10 @@ class BatchResponse(BaseResponse):
def describejobdefinitions(self):
job_def_name = self._get_param("jobDefinitionName")
job_def_list = self._get_param("jobDefinitions")
max_results = self._get_param("maxResults")
next_token = self._get_param("nextToken")
status = self._get_param("status")
job_defs = self.batch_backend.describe_job_definitions(
job_def_name, job_def_list, status, max_results, next_token
job_def_name, job_def_list, status
)
result = {"jobDefinitions": [job.describe() for job in job_defs]}
@ -237,8 +227,6 @@ class BatchResponse(BaseResponse):
job_def = self._get_param("jobDefinition")
job_name = self._get_param("jobName")
job_queue = self._get_param("jobQueue")
parameters = self._get_param("parameters")
retries = self._get_param("retryStrategy")
timeout = self._get_param("timeout")
try:
@ -246,8 +234,6 @@ class BatchResponse(BaseResponse):
job_name,
job_def,
job_queue,
parameters=parameters,
retries=retries,
depends_on=depends_on,
container_overrides=container_overrides,
timeout=timeout,
@ -272,13 +258,9 @@ class BatchResponse(BaseResponse):
def listjobs(self):
job_queue = self._get_param("jobQueue")
job_status = self._get_param("jobStatus")
max_results = self._get_param("maxResults")
next_token = self._get_param("nextToken")
try:
jobs = self.batch_backend.list_jobs(
job_queue, job_status, max_results, next_token
)
jobs = self.batch_backend.list_jobs(job_queue, job_status)
except AWSError as err:
return err.response()

View File

@ -85,7 +85,7 @@ class CustomModel(CloudFormationModel):
return custom_resource
@classmethod
def has_cfn_attr(cls, attribute):
def has_cfn_attr(cls, attr): # pylint: disable=unused-argument
# We don't know which attributes are supported for third-party resources
return True

View File

@ -129,7 +129,7 @@ class FakeStackSet(BaseModel):
if not parameters:
parameters = self.parameters
self.instances.create_instances(accounts, regions, parameters, operation_id)
self.instances.create_instances(accounts, regions, parameters)
self._create_operation(
operation_id=operation_id,
action="CREATE",
@ -176,7 +176,7 @@ class FakeStackInstances(BaseModel):
self.stackset_name = stackset_name
self.stack_instances = []
def create_instances(self, accounts, regions, parameters, operation_id):
def create_instances(self, accounts, regions, parameters):
new_instances = []
for region in regions:
for account in accounts:
@ -524,6 +524,12 @@ class CloudFormationBackend(BaseBackend):
self.deleted_stacks = {}
self.exports = OrderedDict()
self.change_sets = OrderedDict()
self.region = region
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
@staticmethod
def default_vpc_endpoint_service(service_region, zones):
@ -561,7 +567,6 @@ class CloudFormationBackend(BaseBackend):
parameters,
tags=None,
description=None,
region="us-east-1",
admin_role=None,
execution_role=None,
):
@ -651,18 +656,17 @@ class CloudFormationBackend(BaseBackend):
name,
template,
parameters,
region_name,
notification_arns=None,
tags=None,
role_arn=None,
):
stack_id = generate_stack_id(name, region_name)
stack_id = generate_stack_id(name, self.region)
new_stack = FakeStack(
stack_id=stack_id,
name=name,
template=template,
parameters=parameters,
region_name=region_name,
region_name=self.region,
notification_arns=notification_arns,
tags=tags,
role_arn=role_arn,
@ -685,7 +689,6 @@ class CloudFormationBackend(BaseBackend):
template,
parameters,
description,
region_name,
change_set_type,
notification_arns=None,
tags=None,
@ -698,13 +701,13 @@ class CloudFormationBackend(BaseBackend):
else:
raise ValidationError(stack_name)
else:
stack_id = generate_stack_id(stack_name, region_name)
stack_id = generate_stack_id(stack_name, self.region)
stack = FakeStack(
stack_id=stack_id,
name=stack_name,
template={},
parameters=parameters,
region_name=region_name,
region_name=self.region,
notification_arns=notification_arns,
tags=tags,
role_arn=role_arn,
@ -715,7 +718,7 @@ class CloudFormationBackend(BaseBackend):
"REVIEW_IN_PROGRESS", resource_status_reason="User Initiated"
)
change_set_id = generate_changeset_id(change_set_name, region_name)
change_set_id = generate_changeset_id(change_set_name, self.region)
new_change_set = FakeChangeSet(
change_set_type=change_set_type,
@ -734,7 +737,7 @@ class CloudFormationBackend(BaseBackend):
self.change_sets[change_set_id] = new_change_set
return change_set_id, stack.stack_id
def delete_change_set(self, change_set_name, stack_name=None):
def delete_change_set(self, change_set_name):
if change_set_name in self.change_sets:
# This means arn was passed in
del self.change_sets[change_set_name]
@ -745,7 +748,7 @@ class CloudFormationBackend(BaseBackend):
break
del self.change_sets[to_delete]
def describe_change_set(self, change_set_name, stack_name=None):
def describe_change_set(self, change_set_name):
change_set = None
if change_set_name in self.change_sets:
# This means arn was passed in

View File

@ -534,7 +534,7 @@ class ResourceMap(collections_abc.Mapping):
# The Value in SSM parameters is the SSM parameter path
# we need to use ssm_backend to retrieve the
# actual value from parameter store
parameter = ssm_backends[self._region_name].get_parameter(value, False)
parameter = ssm_backends[self._region_name].get_parameter(value)
actual_value = parameter.value
if value_type.find("List") > 0:
return actual_value.split(",")

View File

@ -44,7 +44,7 @@ class CloudFormationResponse(BaseResponse):
return cloudformation_backends[self.region]
@classmethod
def cfnresponse(cls, *args, **kwargs):
def cfnresponse(cls, *args, **kwargs): # pylint: disable=unused-argument
request, full_url, headers = args
full_url += "&Action=ProcessCfnResponse"
return cls.dispatch(request=request, full_url=full_url, headers=headers)
@ -136,7 +136,6 @@ class CloudFormationResponse(BaseResponse):
name=stack_name,
template=stack_body,
parameters=parameters,
region_name=self.region,
notification_arns=stack_notification_arns,
tags=tags,
role_arn=role_arn,
@ -186,7 +185,6 @@ class CloudFormationResponse(BaseResponse):
template=stack_body,
parameters=parameters,
description=description,
region_name=self.region,
notification_arns=stack_notification_arns,
tags=tags,
role_arn=role_arn,
@ -208,12 +206,9 @@ class CloudFormationResponse(BaseResponse):
return template.render(stack_id=stack_id, change_set_id=change_set_id)
def delete_change_set(self):
stack_name = self._get_param("StackName")
change_set_name = self._get_param("ChangeSetName")
self.cloudformation_backend.delete_change_set(
change_set_name=change_set_name, stack_name=stack_name
)
self.cloudformation_backend.delete_change_set(change_set_name=change_set_name)
if self.request_json:
return json.dumps(
{"DeleteChangeSetResponse": {"DeleteChangeSetResult": {}}}
@ -223,10 +218,9 @@ class CloudFormationResponse(BaseResponse):
return template.render()
def describe_change_set(self):
stack_name = self._get_param("StackName")
change_set_name = self._get_param("ChangeSetName")
change_set = self.cloudformation_backend.describe_change_set(
change_set_name=change_set_name, stack_name=stack_name
change_set_name=change_set_name
)
template = self.response_template(DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE)
return template.render(change_set=change_set)
@ -457,7 +451,6 @@ class CloudFormationResponse(BaseResponse):
stackset_name = self._get_param("StackSetName")
stack_body = self._get_param("TemplateBody")
template_url = self._get_param("TemplateURL")
# role_arn = self._get_param('RoleARN')
parameters_list = self._get_list_prefix("Parameters.member")
tags = dict(
(item["key"], item["value"])
@ -475,11 +468,7 @@ class CloudFormationResponse(BaseResponse):
stack_body = self._get_stack_from_s3_url(template_url)
stackset = self.cloudformation_backend.create_stack_set(
name=stackset_name,
template=stack_body,
parameters=parameters,
tags=tags
# role_arn=role_arn
name=stackset_name, template=stack_body, parameters=parameters, tags=tags
)
if self.request_json:
return json.dumps(

View File

@ -34,6 +34,12 @@ class CodeCommit(BaseModel):
class CodeCommitBackend(BaseBackend):
def __init__(self, region=None):
self.repositories = {}
self.region = region
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
@staticmethod
def default_vpc_endpoint_service(service_region, zones):
@ -42,13 +48,13 @@ class CodeCommitBackend(BaseBackend):
service_region, zones, "codecommit"
)
def create_repository(self, region, repository_name, repository_description):
def create_repository(self, repository_name, repository_description):
repository = self.repositories.get(repository_name)
if repository:
raise RepositoryNameExistsException(repository_name)
self.repositories[repository_name] = CodeCommit(
region, repository_description, repository_name
self.region, repository_description, repository_name
)
return self.repositories[repository_name].repository_metadata

View File

@ -26,7 +26,6 @@ class CodeCommitResponse(BaseResponse):
raise InvalidRepositoryNameException()
repository_metadata = self.codecommit_backend.create_repository(
self.region,
self._get_param("repositoryName"),
self._get_param("repositoryDescription"),
)

View File

@ -69,6 +69,12 @@ class CodePipeline(BaseModel):
class CodePipelineBackend(BaseBackend):
def __init__(self, region=None):
self.pipelines = {}
self.region = region
def reset(self):
region_name = self.region
self.__dict__ = {}
self.__init__(region_name)
@staticmethod
def default_vpc_endpoint_service(service_region, zones):
@ -81,7 +87,7 @@ class CodePipelineBackend(BaseBackend):
def iam_backend(self):
return iam_backends["global"]
def create_pipeline(self, region, pipeline, tags):
def create_pipeline(self, pipeline, tags):
if pipeline["name"] in self.pipelines:
raise InvalidStructureException(
"A pipeline with the name '{0}' already exists in account '{1}'".format(
@ -108,7 +114,7 @@ class CodePipelineBackend(BaseBackend):
"Pipeline has only 1 stage(s). There should be a minimum of 2 stages in a pipeline"
)
self.pipelines[pipeline["name"]] = CodePipeline(region, pipeline)
self.pipelines[pipeline["name"]] = CodePipeline(self.region, pipeline)
if tags:
self.pipelines[pipeline["name"]].validate_tags(tags)

View File

@ -11,7 +11,7 @@ class CodePipelineResponse(BaseResponse):
def create_pipeline(self):
pipeline, tags = self.codepipeline_backend.create_pipeline(
self.region, self._get_param("pipeline"), self._get_param("tags")
self._get_param("pipeline"), self._get_param("tags")
)
return json.dumps({"pipeline": pipeline, "tags": tags})

View File

@ -120,7 +120,6 @@ class CognitoIdentityBackend(BaseBackend):
identity_pool_id,
identity_pool_name,
allow_unauthenticated,
allow_classic,
login_providers,
provider_name,
provider_arns,
@ -128,6 +127,9 @@ class CognitoIdentityBackend(BaseBackend):
saml_providers,
tags=None,
):
"""
The AllowClassic-parameter has not yet been implemented
"""
pool = self.identity_pools[identity_pool_id]
pool.identity_pool_name = pool.identity_pool_name or identity_pool_name
if allow_unauthenticated is not None:
@ -183,7 +185,10 @@ class CognitoIdentityBackend(BaseBackend):
)
return response
def list_identities(self, identity_pool_id, max_results=123):
def list_identities(self, identity_pool_id):
"""
The MaxResults-parameter has not yet been implemented
"""
response = json.dumps(self.pools_identities[identity_pool_id])
return response

View File

@ -31,7 +31,6 @@ class CognitoIdentityResponse(BaseResponse):
pool_id = self._get_param("IdentityPoolId")
pool_name = self._get_param("IdentityPoolName")
allow_unauthenticated = self._get_bool_param("AllowUnauthenticatedIdentities")
allow_classic = self._get_bool_param("AllowClassicFlow")
login_providers = self._get_param("SupportedLoginProviders")
provider_name = self._get_param("DeveloperProviderName")
provider_arns = self._get_param("OpenIdConnectProviderARNs")
@ -43,7 +42,6 @@ class CognitoIdentityResponse(BaseResponse):
identity_pool_id=pool_id,
identity_pool_name=pool_name,
allow_unauthenticated=allow_unauthenticated,
allow_classic=allow_classic,
login_providers=login_providers,
provider_name=provider_name,
provider_arns=provider_arns,

View File

@ -1463,7 +1463,7 @@ class CognitoIdpBackend(BaseBackend):
user_pool.users[user.username] = user
return user
def confirm_sign_up(self, client_id, username, confirmation_code):
def confirm_sign_up(self, client_id, username):
user_pool = None
for p in self.user_pools.values():
if client_id in p.clients:
@ -1616,7 +1616,10 @@ class CognitoIdpBackend(BaseBackend):
else:
raise NotAuthorizedError(access_token)
def verify_software_token(self, access_token, user_code):
def verify_software_token(self, access_token):
"""
The parameter UserCode has not yet been implemented
"""
for user_pool in self.user_pools.values():
if access_token in user_pool.access_tokens:
_, username = user_pool.access_tokens[access_token]

View File

@ -532,9 +532,8 @@ class CognitoIdpResponse(BaseResponse):
def confirm_sign_up(self):
client_id = self._get_param("ClientId")
username = self._get_param("Username")
confirmation_code = self._get_param("ConfirmationCode")
cognitoidp_backends[self.region].confirm_sign_up(
client_id=client_id, username=username, confirmation_code=confirmation_code
client_id=client_id, username=username
)
return ""
@ -556,10 +555,7 @@ class CognitoIdpResponse(BaseResponse):
def verify_software_token(self):
access_token = self._get_param("AccessToken")
user_code = self._get_param("UserCode")
result = cognitoidp_backends[self.region].verify_software_token(
access_token, user_code
)
result = cognitoidp_backends[self.region].verify_software_token(access_token)
return json.dumps(result)
def set_user_mfa_preference(self):
@ -607,5 +603,7 @@ class CognitoIdpJsonWebKeyResponse(BaseResponse):
) as f:
self.json_web_key = f.read()
def serve_json_web_key(self, request, full_url, headers):
def serve_json_web_key(
self, request, full_url, headers
): # pylint: disable=unused-argument
return 200, {"Content-Type": "application/json"}, self.json_web_key

View File

@ -855,6 +855,12 @@ class ConfigBackend(BaseBackend):
self.organization_conformance_packs = {}
self.config_rules = {}
self.config_schema = None
self.region = region
def reset(self):
region = self.region
self.__dict__ = {}
self.__init__(region)
@staticmethod
def default_vpc_endpoint_service(service_region, zones):
@ -896,7 +902,7 @@ class ConfigBackend(BaseBackend):
self.config_schema.shapes["MaximumExecutionFrequency"]["enum"],
)
def put_configuration_aggregator(self, config_aggregator, region):
def put_configuration_aggregator(self, config_aggregator):
# Validate the name:
if len(config_aggregator["ConfigurationAggregatorName"]) > 256:
raise NameTooLongException(
@ -967,7 +973,7 @@ class ConfigBackend(BaseBackend):
):
aggregator = ConfigAggregator(
config_aggregator["ConfigurationAggregatorName"],
region,
self.region,
account_sources=account_sources,
org_source=org_source,
tags=tags,
@ -1037,7 +1043,7 @@ class ConfigBackend(BaseBackend):
del self.config_aggregators[config_aggregator]
def put_aggregation_authorization(
self, current_region, authorized_account, authorized_region, tags
self, authorized_account, authorized_region, tags
):
# Tag validation:
tags = validate_tags(tags or [])
@ -1047,7 +1053,7 @@ class ConfigBackend(BaseBackend):
agg_auth = self.aggregation_authorizations.get(key)
if not agg_auth:
agg_auth = ConfigAggregationAuthorization(
current_region, authorized_account, authorized_region, tags=tags
self.region, authorized_account, authorized_region, tags=tags
)
self.aggregation_authorizations[
"{}/{}".format(authorized_account, authorized_region)
@ -1617,7 +1623,6 @@ class ConfigBackend(BaseBackend):
def put_organization_conformance_pack(
self,
region,
name,
template_s3_uri,
template_body,
@ -1649,7 +1654,7 @@ class ConfigBackend(BaseBackend):
)
else:
pack = OrganizationConformancePack(
region=region,
region=self.region,
name=name,
delivery_s3_bucket=delivery_s3_bucket,
delivery_s3_key_prefix=delivery_s3_key_prefix,
@ -1818,7 +1823,7 @@ class ConfigBackend(BaseBackend):
]
}
def put_config_rule(self, region, config_rule, tags=None):
def put_config_rule(self, config_rule, tags=None):
"""Add/Update config rule for evaluating resource compliance.
TBD - Only the "accounting" of config rules are handled at the
@ -1869,14 +1874,14 @@ class ConfigBackend(BaseBackend):
)
# Update the current rule.
rule.modify_fields(region, config_rule, tags)
rule.modify_fields(self.region, config_rule, tags)
else:
# Create a new ConfigRule if the limit hasn't been reached.
if len(self.config_rules) == ConfigRule.MAX_RULES:
raise MaxNumberOfConfigRulesExceededException(
rule_name, ConfigRule.MAX_RULES
)
rule = ConfigRule(region, config_rule, tags)
rule = ConfigRule(self.region, config_rule, tags)
self.config_rules[rule_name] = rule
return ""

View File

@ -16,7 +16,7 @@ class ConfigResponse(BaseResponse):
def put_configuration_aggregator(self):
aggregator = self.config_backend.put_configuration_aggregator(
json.loads(self.body), self.region
json.loads(self.body)
)
schema = {"ConfigurationAggregator": aggregator}
return json.dumps(schema)
@ -37,7 +37,6 @@ class ConfigResponse(BaseResponse):
def put_aggregation_authorization(self):
agg_auth = self.config_backend.put_aggregation_authorization(
self.region,
self._get_param("AuthorizedAccountId"),
self._get_param("AuthorizedAwsRegion"),
self._get_param("Tags"),
@ -170,7 +169,6 @@ class ConfigResponse(BaseResponse):
def put_organization_conformance_pack(self):
conformance_pack = self.config_backend.put_organization_conformance_pack(
region=self.region,
name=self._get_param("OrganizationConformancePackName"),
template_s3_uri=self._get_param("TemplateS3Uri"),
template_body=self._get_param("TemplateBody"),
@ -223,7 +221,7 @@ class ConfigResponse(BaseResponse):
def put_config_rule(self):
self.config_backend.put_config_rule(
self.region, self._get_param("ConfigRule"), self._get_param("Tags")
self._get_param("ConfigRule"), self._get_param("Tags")
)
return ""

View File

@ -86,7 +86,7 @@ class CallbackResponse(responses.CallbackResponse):
return False
def not_implemented_callback(request):
def not_implemented_callback(request): # pylint: disable=unused-argument
status = 400
headers = {}
response = "The method is not implemented"

View File

@ -64,13 +64,13 @@ class RESTError(HTTPException):
)
self.content_type = "application/xml"
def get_headers(self, *args, **kwargs):
def get_headers(self, *args, **kwargs): # pylint: disable=unused-argument
return {
"X-Amzn-ErrorType": self.error_type or "UnknownError",
"Content-Type": self.content_type,
}
def get_body(self, *args, **kwargs):
def get_body(self, *args, **kwargs): # pylint: disable=unused-argument
return self.description

View File

@ -242,7 +242,7 @@ class MockRawResponse(BytesIO):
response_input = response_input.encode("utf-8")
super().__init__(response_input)
def stream(self, **kwargs):
def stream(self, **kwargs): # pylint: disable=unused-argument
contents = self.read()
while contents:
yield contents
@ -335,7 +335,7 @@ class BotocoreEventMockAWS(BaseMockAWS):
botocore_stubber.reset()
reset_responses_mock(responses_mock)
def enable_patching(self, reset=True):
def enable_patching(self, reset=True): # pylint: disable=unused-argument
botocore_stubber.enabled = True
for method in BOTOCORE_HTTP_METHODS:
for backend in self.backends_for_urls.values():
@ -482,7 +482,7 @@ class InstanceTrackerMeta(type):
class BaseModel(metaclass=InstanceTrackerMeta):
def __new__(cls, *args, **kwargs):
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
instance = super(BaseModel, cls).__new__(cls)
cls.instances.append(instance)
return instance

View File

@ -811,7 +811,9 @@ class BaseResponse(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
class MotoAPIResponse(BaseResponse):
def reset_response(self, request, full_url, headers):
def reset_response(
self, request, full_url, headers
): # pylint: disable=unused-argument
if request.method == "POST":
from .models import moto_api_backend
@ -819,7 +821,9 @@ class MotoAPIResponse(BaseResponse):
return 200, {}, json.dumps({"status": "ok"})
return 400, {}, json.dumps({"Error": "Need to POST to reset Moto"})
def reset_auth_response(self, request, full_url, headers):
def reset_auth_response(
self, request, full_url, headers
): # pylint: disable=unused-argument
if request.method == "POST":
previous_initial_no_auth_action_count = (
settings.INITIAL_NO_AUTH_ACTION_COUNT
@ -840,7 +844,7 @@ class MotoAPIResponse(BaseResponse):
)
return 400, {}, json.dumps({"Error": "Need to POST to reset Moto Auth"})
def model_data(self, request, full_url, headers):
def model_data(self, request, full_url, headers): # pylint: disable=unused-argument
from moto.core.models import model_data
results = {}
@ -863,7 +867,7 @@ class MotoAPIResponse(BaseResponse):
results[service][name].append(inst_result)
return 200, {"Content-Type": "application/javascript"}, json.dumps(results)
def dashboard(self, request, full_url, headers):
def dashboard(self, request, full_url, headers): # pylint: disable=unused-argument
from flask import render_template
return render_template("dashboard.html")

View File

@ -41,7 +41,10 @@ class DataBrewBackend(BaseBackend):
def list_recipes(self):
return [self.recipes[key] for key in self.recipes] if self.recipes else []
def get_recipe(self, recipe_name, version):
def get_recipe(self, recipe_name):
"""
The Version-parameter has not yet been implemented
"""
if recipe_name not in self.recipes:
raise RecipeNotFoundException(recipe_name)
return self.recipes[recipe_name]

View File

@ -59,11 +59,7 @@ class DataBrewResponse(BaseResponse):
recipe_name = parsed_url.path.rstrip("/").rsplit("/", 1)[1]
try:
return json.dumps(
self.databrew_backend.get_recipe(recipe_name, None).as_dict()
)
recipe = self.databrew_backend.get_recipe(recipe_name)
return json.dumps(recipe.as_dict())
except DataBrewClientError as e:
return e.code, e.get_headers(), e.get_body()
# 'DescribeRecipe'

View File

@ -104,6 +104,7 @@ class Pipeline(CloudFormationModel):
class DataPipelineBackend(BaseBackend):
def __init__(self, region=None):
self.pipelines = OrderedDict()
self.region = region
def create_pipeline(self, name, unique_id, **kwargs):
pipeline = Pipeline(name, unique_id, **kwargs)

View File

@ -186,16 +186,9 @@ class DAXBackend(BaseBackend):
node_type,
description,
replication_factor,
availability_zones,
subnet_group_name,
security_group_ids,
preferred_maintenance_window,
notification_topic_arn,
iam_role_arn,
parameter_group_name,
tags,
sse_specification,
cluster_endpoint_encryption_type,
):
"""
The following parameters are not yet processed:
@ -247,9 +240,10 @@ class DAXBackend(BaseBackend):
raise ClusterNotFoundFault()
return self._tagger.list_tags_for_resource(self.clusters[name].arn)
def increase_replication_factor(
self, cluster_name, new_replication_factor, availability_zones
):
def increase_replication_factor(self, cluster_name, new_replication_factor):
"""
The AvailabilityZones-parameter is not yet implemented
"""
if cluster_name not in self.clusters:
raise ClusterNotFoundFault()
self.clusters[cluster_name].increase_replication_factor(new_replication_factor)
@ -259,7 +253,6 @@ class DAXBackend(BaseBackend):
self,
cluster_name,
new_replication_factor,
availability_zones,
node_ids_to_remove,
):
"""

View File

@ -17,16 +17,9 @@ class DAXResponse(BaseResponse):
node_type = params.get("NodeType")
description = params.get("Description")
replication_factor = params.get("ReplicationFactor")
availability_zones = params.get("AvailabilityZones")
subnet_group_name = params.get("SubnetGroupName")
security_group_ids = params.get("SecurityGroupIds")
preferred_maintenance_window = params.get("PreferredMaintenanceWindow")
notification_topic_arn = params.get("NotificationTopicArn")
iam_role_arn = params.get("IamRoleArn")
parameter_group_name = params.get("ParameterGroupName")
tags = params.get("Tags", [])
sse_specification = params.get("SSESpecification", {})
cluster_endpoint_encryption_type = params.get("ClusterEndpointEncryptionType")
self._validate_arn(iam_role_arn)
self._validate_name(cluster_name)
@ -36,16 +29,9 @@ class DAXResponse(BaseResponse):
node_type=node_type,
description=description,
replication_factor=replication_factor,
availability_zones=availability_zones,
subnet_group_name=subnet_group_name,
security_group_ids=security_group_ids,
preferred_maintenance_window=preferred_maintenance_window,
notification_topic_arn=notification_topic_arn,
iam_role_arn=iam_role_arn,
parameter_group_name=parameter_group_name,
tags=tags,
sse_specification=sse_specification,
cluster_endpoint_encryption_type=cluster_endpoint_encryption_type,
)
return json.dumps(dict(Cluster=cluster.to_json()))
@ -106,11 +92,8 @@ class DAXResponse(BaseResponse):
params = json.loads(self.body)
cluster_name = params.get("ClusterName")
new_replication_factor = params.get("NewReplicationFactor")
availability_zones = params.get("AvailabilityZones")
cluster = self.dax_backend.increase_replication_factor(
cluster_name=cluster_name,
new_replication_factor=new_replication_factor,
availability_zones=availability_zones,
cluster_name=cluster_name, new_replication_factor=new_replication_factor
)
return json.dumps({"Cluster": cluster.to_json()})
@ -118,12 +101,10 @@ class DAXResponse(BaseResponse):
params = json.loads(self.body)
cluster_name = params.get("ClusterName")
new_replication_factor = params.get("NewReplicationFactor")
availability_zones = params.get("AvailabilityZones")
node_ids_to_remove = params.get("NodeIdsToRemove")
cluster = self.dax_backend.decrease_replication_factor(
cluster_name=cluster_name,
new_replication_factor=new_replication_factor,
availability_zones=availability_zones,
node_ids_to_remove=node_ids_to_remove,
)
return json.dumps({"Cluster": cluster.to_json()})

View File

@ -39,13 +39,11 @@ class DatabaseMigrationServiceBackend(BaseBackend):
migration_type,
table_mappings,
replication_task_settings,
cdc_start_time,
cdc_start_position,
cdc_stop_position,
tags,
task_data,
resource_identifier,
):
"""
The following parameters are not yet implemented:
CDCStartTime, CDCStartPosition, CDCStopPosition, Tags, TaskData, ResourceIdentifier
"""
replication_task = FakeReplicationTask(
replication_task_identifier=replication_task_identifier,
source_endpoint_arn=source_endpoint_arn,
@ -66,14 +64,11 @@ class DatabaseMigrationServiceBackend(BaseBackend):
return replication_task
def start_replication_task(
self,
replication_task_arn,
start_replication_task_type,
cdc_start_time,
cdc_start_position,
cdc_stop_position,
):
def start_replication_task(self, replication_task_arn):
"""
The following parameters have not yet been implemented:
StartReplicationTaskType, CDCStartTime, CDCStartPosition, CDCStopPosition
"""
if not self.replication_tasks.get(replication_task_arn):
raise ResourceNotFoundFault("Replication task could not be found.")
@ -95,13 +90,16 @@ class DatabaseMigrationServiceBackend(BaseBackend):
return task
def describe_replication_tasks(self, filters, max_records, without_settings):
def describe_replication_tasks(self, filters, max_records):
"""
The parameter WithoutSettings has not yet been implemented
"""
replication_tasks = filter_tasks(self.replication_tasks.values(), filters)
if max_records and max_records > 0:
replication_tasks = replication_tasks[:max_records]
return None, replication_tasks
return replication_tasks
class FakeReplicationTask(BaseModel):

View File

@ -10,8 +10,6 @@ class DatabaseMigrationServiceResponse(BaseResponse):
def dms_backend(self):
return dms_backends[self.region]
# add methods from here
def create_replication_task(self):
replication_task_identifier = self._get_param("ReplicationTaskIdentifier")
source_endpoint_arn = self._get_param("SourceEndpointArn")
@ -20,12 +18,6 @@ class DatabaseMigrationServiceResponse(BaseResponse):
migration_type = self._get_param("MigrationType")
table_mappings = self._get_param("TableMappings")
replication_task_settings = self._get_param("ReplicationTaskSettings")
cdc_start_time = self._get_param("CdcStartTime")
cdc_start_position = self._get_param("CdcStartPosition")
cdc_stop_position = self._get_param("CdcStopPosition")
tags = self._get_list_prefix("Tags.member")
task_data = self._get_param("TaskData")
resource_identifier = self._get_param("ResourceIdentifier")
replication_task = self.dms_backend.create_replication_task(
replication_task_identifier=replication_task_identifier,
source_endpoint_arn=source_endpoint_arn,
@ -34,34 +26,18 @@ class DatabaseMigrationServiceResponse(BaseResponse):
migration_type=migration_type,
table_mappings=table_mappings,
replication_task_settings=replication_task_settings,
cdc_start_time=cdc_start_time,
cdc_start_position=cdc_start_position,
cdc_stop_position=cdc_stop_position,
tags=tags,
task_data=task_data,
resource_identifier=resource_identifier,
)
return json.dumps({"ReplicationTask": replication_task.to_dict()})
def start_replication_task(self):
replication_task_arn = self._get_param("ReplicationTaskArn")
start_replication_task_type = self._get_param("StartReplicationTaskType")
cdc_start_time = self._get_param("CdcStartTime")
cdc_start_position = self._get_param("CdcStartPosition")
cdc_stop_position = self._get_param("CdcStopPosition")
replication_task = self.dms_backend.start_replication_task(
replication_task_arn=replication_task_arn,
start_replication_task_type=start_replication_task_type,
cdc_start_time=cdc_start_time,
cdc_start_position=cdc_start_position,
cdc_stop_position=cdc_stop_position,
replication_task_arn=replication_task_arn
)
return json.dumps({"ReplicationTask": replication_task.to_dict()})
# add templates from here
def stop_replication_task(self):
replication_task_arn = self._get_param("ReplicationTaskArn")
replication_task = self.dms_backend.stop_replication_task(
@ -81,14 +57,10 @@ class DatabaseMigrationServiceResponse(BaseResponse):
def describe_replication_tasks(self):
filters = self._get_list_prefix("Filters.member")
max_records = self._get_int_param("MaxRecords")
marker = self._get_param("Marker")
without_settings = self._get_param("WithoutSettings")
marker, replication_tasks = self.dms_backend.describe_replication_tasks(
filters=filters, max_records=max_records, without_settings=without_settings
replication_tasks = self.dms_backend.describe_replication_tasks(
filters=filters, max_records=max_records
)
return json.dumps(
dict(
marker=marker, ReplicationTasks=[t.to_dict() for t in replication_tasks]
)
dict(ReplicationTasks=[t.to_dict() for t in replication_tasks])
)

View File

@ -35,9 +35,9 @@ from moto.dynamodb.limits import HASH_KEY_MAX_LENGTH, RANGE_KEY_MAX_LENGTH
class DynamoJsonEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, "to_json"):
return obj.to_json()
def default(self, o):
if hasattr(o, "to_json"):
return o.to_json()
def dynamo_json_dump(dynamo_object):
@ -483,8 +483,8 @@ class Table(CloudFormationModel):
return ebs_key.arn
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn", "StreamArn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn", "StreamArn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

View File

@ -52,7 +52,7 @@ class NestableExpressionParserMixin(object):
in the originating expression.
"""
def __init__(self, *args, **kwargs):
def __init__(self):
self.target_clauses = deque()
def _parse_target_clause(self, factory_class):

View File

@ -10,9 +10,9 @@ from .comparisons import get_comparison_func
class DynamoJsonEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, "to_json"):
return obj.to_json()
def default(self, o):
if hasattr(o, "to_json"):
return o.to_json()
def dynamo_json_dump(dynamo_object):
@ -297,8 +297,8 @@ class Table(CloudFormationModel):
return item
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["StreamArn"]
def has_cfn_attr(cls, attr):
return attr in ["StreamArn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

View File

@ -277,10 +277,9 @@ class MalformedAMIIdError(EC2ClientError):
class InvalidSnapshotIdError(EC2ClientError):
def __init__(self, snapshot_id):
super().__init__(
"InvalidSnapshot.NotFound", ""
) # Note: AWS returns empty message for this, as of 2014.08.22.
def __init__(self):
# Note: AWS returns empty message for this, as of 2014.08.22.
super().__init__("InvalidSnapshot.NotFound", "")
class InvalidSnapshotInUse(EC2ClientError):

View File

@ -439,8 +439,8 @@ class NetworkInterface(TaggedEC2Resource, CloudFormationModel):
return self._group_set
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["PrimaryPrivateIpAddress", "SecondaryPrivateIpAddresses"]
def has_cfn_attr(cls, attr):
return attr in ["PrimaryPrivateIpAddress", "SecondaryPrivateIpAddresses"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -902,7 +902,7 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
def physical_resource_id(self):
return self.id
def start(self, *args, **kwargs):
def start(self):
for nic in self.nics.values():
nic.start()
@ -912,7 +912,7 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
self._reason = ""
self._state_reason = StateReason()
def stop(self, *args, **kwargs):
def stop(self):
for nic in self.nics.values():
nic.stop()
@ -927,10 +927,10 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
"Client.UserInitiatedShutdown",
)
def delete(self, region):
def delete(self, region): # pylint: disable=unused-argument
self.terminate()
def terminate(self, *args, **kwargs):
def terminate(self):
for nic in self.nics.values():
nic.stop()
@ -969,7 +969,7 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
].id
)
def reboot(self, *args, **kwargs):
def reboot(self):
self._state.name = "running"
self._state.code = 16
@ -1072,8 +1072,8 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
eni.device_index = None
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in [
def has_cfn_attr(cls, attr):
return attr in [
"AvailabilityZone",
"PrivateDnsName",
"PublicDnsName",
@ -1734,7 +1734,6 @@ class AmiBackend(object):
instance_id,
name=None,
description=None,
context=None,
tag_specifications=None,
):
# TODO: check that instance exists and pull info from it.
@ -1781,9 +1780,7 @@ class AmiBackend(object):
self.amis[ami_id] = ami
return ami
def describe_images(
self, ami_ids=(), filters=None, exec_users=None, owners=None, context=None
):
def describe_images(self, ami_ids=(), filters=None, exec_users=None, owners=None):
images = self.amis.copy().values()
if len(ami_ids):
@ -2427,7 +2424,7 @@ class SecurityGroup(TaggedEC2Resource, CloudFormationModel):
if security_group:
security_group.delete(region_name)
def delete(self, region_name):
def delete(self, region_name): # pylint: disable=unused-argument
"""Not exposed as part of the ELB API - used for CloudFormation."""
self.ec2_backend.delete_security_group(group_id=self.id)
@ -2599,8 +2596,8 @@ class SecurityGroup(TaggedEC2Resource, CloudFormationModel):
return True
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["GroupId"]
def has_cfn_attr(cls, attr):
return attr in ["GroupId"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -3606,8 +3603,7 @@ class EBSBackend(object):
if snapshot_ids:
matches = [snap for snap in matches if snap.id in snapshot_ids]
if len(snapshot_ids) > len(matches):
unknown_ids = set(snapshot_ids) - set(matches)
raise InvalidSnapshotIdError(unknown_ids)
raise InvalidSnapshotIdError()
if filters:
matches = generic_filter(filters, matches)
return matches
@ -3630,7 +3626,7 @@ class EBSBackend(object):
def get_snapshot(self, snapshot_id):
snapshot = self.snapshots.get(snapshot_id, None)
if not snapshot:
raise InvalidSnapshotIdError(snapshot_id)
raise InvalidSnapshotIdError()
return snapshot
def delete_snapshot(self, snapshot_id):
@ -3639,7 +3635,7 @@ class EBSBackend(object):
if snapshot.from_ami and snapshot.from_ami in self.amis:
raise InvalidSnapshotInUse(snapshot_id, snapshot.from_ami)
return self.snapshots.pop(snapshot_id)
raise InvalidSnapshotIdError(snapshot_id)
raise InvalidSnapshotIdError()
def get_create_volume_permission_groups(self, snapshot_id):
snapshot = self.get_snapshot(snapshot_id)
@ -4625,8 +4621,8 @@ class Subnet(TaggedEC2Resource, CloudFormationModel):
return super().get_filter_value(filter_name, "DescribeSubnets")
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["AvailabilityZone"]
def has_cfn_attr(cls, attr):
return attr in ["AvailabilityZone"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -4713,7 +4709,6 @@ class SubnetBackend(object):
ipv6_cidr_block=None,
availability_zone=None,
availability_zone_id=None,
context=None,
tags=None,
):
subnet_id = random_subnet_id()
@ -6032,7 +6027,7 @@ class EgressOnlyInternetGatewayBackend(object):
self.egress_only_internet_gateway_backend[egress_only_igw.id] = egress_only_igw
return egress_only_igw
def describe_egress_only_internet_gateways(self, ids=None, filters=None):
def describe_egress_only_internet_gateways(self, ids=None):
"""
The Filters-argument is not yet supported
"""
@ -6659,8 +6654,8 @@ class ElasticAddress(TaggedEC2Resource, CloudFormationModel):
return self.public_ip
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["AllocationId"]
def has_cfn_attr(cls, attr):
return attr in ["AllocationId"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -7997,7 +7992,7 @@ class TransitGatewayAttachmentBackend(object):
return transit_gateway_vpc_attachment
def describe_transit_gateway_attachments(
self, transit_gateways_attachment_ids=None, filters=None, max_results=0
self, transit_gateways_attachment_ids=None, filters=None
):
transit_gateway_attachments = list(self.transit_gateway_attachments.values())
@ -8023,7 +8018,7 @@ class TransitGatewayAttachmentBackend(object):
return result
def describe_transit_gateway_vpc_attachments(
self, transit_gateways_attachment_ids=None, filters=None, max_results=0
self, transit_gateways_attachment_ids=None, filters=None
):
transit_gateway_attachments = list(self.transit_gateway_attachments.values())
@ -8131,7 +8126,7 @@ class TransitGatewayAttachmentBackend(object):
return transit_gateway_peering_attachment
def describe_transit_gateway_peering_attachments(
self, transit_gateways_attachment_ids=None, filters=None, max_results=0
self, transit_gateways_attachment_ids=None, filters=None
):
transit_gateway_attachments = list(self.transit_gateway_attachments.values())

View File

@ -13,7 +13,6 @@ class AmisResponse(BaseResponse):
instance_id,
name,
description,
context=self,
tag_specifications=tag_specifications,
)
template = self.response_template(CREATE_IMAGE_RESPONSE)
@ -45,11 +44,7 @@ class AmisResponse(BaseResponse):
owners = self._get_multi_param("Owner")
exec_users = self._get_multi_param("ExecutableBy")
images = self.ec2_backend.describe_images(
ami_ids=ami_ids,
filters=filters,
exec_users=exec_users,
owners=owners,
context=self,
ami_ids=ami_ids, filters=filters, exec_users=exec_users, owners=owners
)
template = self.response_template(DESCRIBE_IMAGES_RESPONSE)
return template.render(images=images)

View File

@ -1,5 +1,5 @@
from moto.core.responses import BaseResponse
from moto.ec2.utils import filters_from_querystring, add_tag_specification
from moto.ec2.utils import add_tag_specification
class EgressOnlyInternetGateway(BaseResponse):
@ -16,9 +16,8 @@ class EgressOnlyInternetGateway(BaseResponse):
def describe_egress_only_internet_gateways(self):
egress_only_igw_ids = self._get_multi_param("EgressOnlyInternetGatewayId")
filters = filters_from_querystring(self.querystring)
egress_only_igws = self.ec2_backend.describe_egress_only_internet_gateways(
egress_only_igw_ids, filters
egress_only_igw_ids
)
template = self.response_template(DESCRIBE_EGRESS_ONLY_IGW_RESPONSE)
return template.render(egress_only_igws=egress_only_igws)

View File

@ -25,7 +25,6 @@ class Subnets(BaseResponse):
ipv6_cidr_block,
availability_zone,
availability_zone_id,
context=self,
tags=tags,
)
template = self.response_template(CREATE_SUBNET_RESPONSE)

View File

@ -31,12 +31,10 @@ class TransitGatewayAttachment(BaseResponse):
"TransitGatewayAttachmentIds"
)
filters = filters_from_querystring(self.querystring)
max_results = self._get_param("MaxResults")
transit_gateway_vpc_attachments = (
self.ec2_backend.describe_transit_gateway_vpc_attachments(
transit_gateways_attachment_ids=transit_gateways_attachment_ids,
filters=filters,
max_results=max_results,
)
)
template = self.response_template(DESCRIBE_TRANSIT_GATEWAY_VPC_ATTACHMENTS)
@ -66,12 +64,10 @@ class TransitGatewayAttachment(BaseResponse):
"TransitGatewayAttachmentIds"
)
filters = filters_from_querystring(self.querystring)
max_results = self._get_param("MaxResults")
transit_gateway_attachments = (
self.ec2_backend.describe_transit_gateway_attachments(
transit_gateways_attachment_ids=transit_gateways_attachment_ids,
filters=filters,
max_results=max_results,
)
)
template = self.response_template(DESCRIBE_TRANSIT_GATEWAY_ATTACHMENTS)
@ -159,12 +155,10 @@ class TransitGatewayAttachment(BaseResponse):
"TransitGatewayAttachmentIds"
)
filters = filters_from_querystring(self.querystring)
max_results = self._get_param("MaxResults")
transit_gateway_peering_attachments = (
self.ec2_backend.describe_transit_gateway_peering_attachments(
transit_gateways_attachment_ids=transit_gateways_attachment_ids,
filters=filters,
max_results=max_results,
)
)
template = self.response_template(DESCRIBE_TRANSIT_GATEWAY_PEERING_ATTACHMENTS)

View File

@ -153,8 +153,8 @@ class Repository(BaseObject, CloudFormationModel):
ecr_backend.delete_repository(self.name)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn", "RepositoryUri"]
def has_cfn_attr(cls, attr):
return attr in ["Arn", "RepositoryUri"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -505,13 +505,10 @@ class ECRBackend(BaseBackend):
existing_images[0].update_tag(image_tag)
return existing_images[0]
def batch_get_image(
self,
repository_name,
registry_id=None,
image_ids=None,
accepted_media_types=None,
):
def batch_get_image(self, repository_name, registry_id=None, image_ids=None):
"""
The parameter AcceptedMediaTypes has not yet been implemented
"""
if repository_name in self.repositories:
repository = self.repositories[repository_name]
else:

View File

@ -19,8 +19,8 @@ class ECRResponse(BaseResponse):
except ValueError:
return {}
def _get_param(self, param, if_none=None):
return self.request_params.get(param, if_none)
def _get_param(self, param_name, if_none=None):
return self.request_params.get(param_name, if_none)
def create_repository(self):
repository_name = self._get_param("repositoryName")
@ -106,10 +106,9 @@ class ECRResponse(BaseResponse):
repository_str = self._get_param("repositoryName")
registry_id = self._get_param("registryId")
image_ids = self._get_param("imageIds")
accepted_media_types = self._get_param("acceptedMediaTypes")
response = self.ecr_backend.batch_get_image(
repository_str, registry_id, image_ids, accepted_media_types
repository_str, registry_id, image_ids
)
return json.dumps(response)

View File

@ -122,8 +122,8 @@ class Cluster(BaseObject, CloudFormationModel):
return original_resource
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -484,8 +484,8 @@ class Service(BaseObject, CloudFormationModel):
)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Name"]
def has_cfn_attr(cls, attr):
return attr in ["Name"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -1469,9 +1469,10 @@ class EC2ContainerServiceBackend(BaseBackend):
cluster_name=None,
attr_name=None,
attr_value=None,
max_results=None,
next_token=None,
):
"""
Pagination is not yet implemented
"""
if target_type != "container-instance":
raise JsonRESTError(
"InvalidParameterException", "targetType must be container-instance"
@ -1554,9 +1555,10 @@ class EC2ContainerServiceBackend(BaseBackend):
"TargetNotFoundException", "Could not find {0}".format(target_id)
)
def list_task_definition_families(
self, family_prefix=None, status=None, max_results=None, next_token=None
):
def list_task_definition_families(self, family_prefix=None):
"""
The Status and pagination parameters are not yet implemented
"""
for task_fam in self.task_definitions:
if family_prefix is not None and not task_fam.startswith(family_prefix):
continue
@ -1720,7 +1722,10 @@ class EC2ContainerServiceBackend(BaseBackend):
return task_set_results
def delete_task_set(self, cluster, service, task_set, force=False):
def delete_task_set(self, cluster, service, task_set):
"""
The Force-parameter is not yet implemented
"""
cluster_name = cluster.split("/")[-1]
service_name = service.split("/")[-1]

View File

@ -22,8 +22,8 @@ class EC2ContainerServiceResponse(BaseResponse):
except ValueError:
return {}
def _get_param(self, param, if_none=None):
return self.request_params.get(param, if_none)
def _get_param(self, param_name, if_none=None):
return self.request_params.get(param_name, if_none)
def create_cluster(self):
cluster_name = self._get_param("clusterName")
@ -319,11 +319,9 @@ class EC2ContainerServiceResponse(BaseResponse):
attr_name = self._get_param("attributeName")
attr_value = self._get_param("attributeValue")
target_type = self._get_param("targetType")
max_results = self._get_param("maxResults")
next_token = self._get_param("nextToken")
results = self.ecs_backend.list_attributes(
target_type, cluster_name, attr_name, attr_value, max_results, next_token
target_type, cluster_name, attr_name, attr_value
)
# Result will be [item will be {0 cluster_name, 1 arn, 2 name, 3 value}]
@ -355,13 +353,7 @@ class EC2ContainerServiceResponse(BaseResponse):
def list_task_definition_families(self):
family_prefix = self._get_param("familyPrefix")
status = self._get_param("status")
max_results = self._get_param("maxResults")
next_token = self._get_param("nextToken")
results = self.ecs_backend.list_task_definition_families(
family_prefix, status, max_results, next_token
)
results = self.ecs_backend.list_task_definition_families(family_prefix)
return json.dumps({"families": list(results)})
@ -432,10 +424,7 @@ class EC2ContainerServiceResponse(BaseResponse):
cluster_str = self._get_param("cluster")
service_str = self._get_param("service")
task_set = self._get_param("taskSet")
force = self._get_param("force")
task_set = self.ecs_backend.delete_task_set(
cluster_str, service_str, task_set, force
)
task_set = self.ecs_backend.delete_task_set(cluster_str, service_str, task_set)
return json.dumps({"taskSet": task_set.response_object})
def update_task_set(self):

View File

@ -4,7 +4,7 @@ from moto.core.exceptions import AWSError
class EKSError(AWSError):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
super(AWSError, self).__init__()
self.description = json.dumps(kwargs)
self.headers = {"status": self.STATUS, "x-amzn-ErrorType": self.TYPE}

View File

@ -78,11 +78,13 @@ class ElasticTranscoderBackend(BaseBackend):
input_bucket,
output_bucket,
role,
aws_kms_key_arn,
notifications,
content_config,
thumbnail_config,
):
"""
The following parameters are not yet implemented:
AWSKMSKeyArn, Notifications
"""
pipeline = Pipeline(
self.region_name,
name,
@ -102,17 +104,11 @@ class ElasticTranscoderBackend(BaseBackend):
def read_pipeline(self, pipeline_id):
return self.pipelines[pipeline_id]
def update_pipeline(
self,
pipeline_id,
name,
input_bucket,
role,
aws_kms_key_arn,
notifications,
content_config,
thumbnail_config,
):
def update_pipeline(self, pipeline_id, name, input_bucket, role):
"""
The following parameters are not yet implemented:
AWSKMSKeyArn, Notifications, ContentConfig, ThumbnailConfig
"""
pipeline = self.read_pipeline(pipeline_id)
pipeline.update(name, input_bucket, role)
warnings = []

View File

@ -35,8 +35,6 @@ class ElasticTranscoderResponse(BaseResponse):
input_bucket = self._get_param("InputBucket")
output_bucket = self._get_param("OutputBucket")
role = self._get_param("Role")
aws_kms_key_arn = self._get_param("AwsKmsKeyArn")
notifications = self._get_param("Notifications")
content_config = self._get_param("ContentConfig")
thumbnail_config = self._get_param("ThumbnailConfig")
if not role:
@ -58,8 +56,6 @@ class ElasticTranscoderResponse(BaseResponse):
input_bucket=input_bucket,
output_bucket=output_bucket,
role=role,
aws_kms_key_arn=aws_kms_key_arn,
notifications=notifications,
content_config=content_config,
thumbnail_config=thumbnail_config,
)
@ -109,22 +105,11 @@ class ElasticTranscoderResponse(BaseResponse):
name = self._get_param("Name")
input_bucket = self._get_param("InputBucket")
role = self._get_param("Role")
aws_kms_key_arn = self._get_param("AwsKmsKeyArn")
notifications = self._get_param("Notifications")
content_config = self._get_param("ContentConfig")
thumbnail_config = self._get_param("ThumbnailConfig")
err = self.validate_pipeline_id(_id)
if err:
return err
pipeline, warnings = self.elastictranscoder_backend.update_pipeline(
pipeline_id=_id,
name=name,
input_bucket=input_bucket,
role=role,
aws_kms_key_arn=aws_kms_key_arn,
notifications=notifications,
content_config=content_config,
thumbnail_config=thumbnail_config,
pipeline_id=_id, name=name, input_bucket=input_bucket, role=role
)
return (

View File

@ -207,8 +207,8 @@ class FakeLoadBalancer(CloudFormationModel):
return self.name
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in [
def has_cfn_attr(cls, attr):
return attr in [
"CanonicalHostedZoneName",
"CanonicalHostedZoneNameID",
"DNSName",

View File

@ -131,7 +131,7 @@ class ActionTargetGroupNotFoundError(ELBClientError):
class ListenerOrBalancerMissingError(ELBClientError):
def __init__(self, arn):
def __init__(self):
super().__init__(
"ValidationError",
"You must specify either listener ARNs or a load balancer ARN",

View File

@ -515,8 +515,8 @@ class FakeLoadBalancer(CloudFormationModel):
return load_balancer
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in [
def has_cfn_attr(cls, attr):
return attr in [
"DNSName",
"LoadBalancerName",
"CanonicalHostedZoneID",

View File

@ -670,14 +670,8 @@ class ElasticMapReduceBackend(BaseBackend):
instance_group.auto_scaling_policy = auto_scaling_policy
return instance_group
def remove_auto_scaling_policy(self, cluster_id, instance_group_id):
instance_groups = self.get_instance_groups(
instance_group_ids=[instance_group_id]
)
if len(instance_groups) == 0:
return None
instance_group = instance_groups[0]
instance_group.auto_scaling_policy = None
def remove_auto_scaling_policy(self, instance_group_id):
self.put_auto_scaling_policy(instance_group_id, auto_scaling_policy=None)
def create_security_configuration(self, name, security_configuration):
if name in self.security_configurations:

View File

@ -532,7 +532,7 @@ class ElasticMapReduceResponse(BaseResponse):
def remove_auto_scaling_policy(self):
cluster_id = self._get_param("ClusterId")
instance_group_id = self._get_param("InstanceGroupId")
instance_group = self.backend.put_auto_scaling_policy(instance_group_id, None)
instance_group = self.backend.remove_auto_scaling_policy(instance_group_id)
template = self.response_template(REMOVE_AUTO_SCALING_POLICY)
return template.render(cluster_id=cluster_id, instance_group=instance_group)

View File

@ -14,15 +14,15 @@ def random_id(size=13):
return "".join(str(random.choice(chars)) for x in range(size))
def random_cluster_id(size=13):
def random_cluster_id():
return "j-{0}".format(random_id())
def random_step_id(size=13):
def random_step_id():
return "s-{0}".format(random_id())
def random_instance_group_id(size=13):
def random_instance_group_id():
return "i-{0}".format(random_id())

View File

@ -22,7 +22,6 @@ class Domain(BaseModel):
domain_endpoint_options,
advanced_security_options,
auto_tune_options,
tag_list,
):
self.domain_id = get_random_hex(8)
self.region_name = region_name
@ -104,7 +103,6 @@ class ElasticsearchServiceBackend(BaseBackend):
domain_endpoint_options,
advanced_security_options,
auto_tune_options,
tag_list,
):
# TODO: Persist/Return other attributes
new_domain = Domain(
@ -124,7 +122,6 @@ class ElasticsearchServiceBackend(BaseBackend):
domain_endpoint_options=domain_endpoint_options,
advanced_security_options=advanced_security_options,
auto_tune_options=auto_tune_options,
tag_list=tag_list,
)
self.domains[domain_name] = new_domain
return new_domain.to_json()
@ -139,7 +136,7 @@ class ElasticsearchServiceBackend(BaseBackend):
raise DomainNotFound(domain_name)
return self.domains[domain_name].to_json()
def list_domain_names(self, engine_type):
def list_domain_names(self):
"""
The engine-type parameter is not yet supported.
Pagination is not yet implemented.

View File

@ -71,7 +71,6 @@ class ElasticsearchServiceResponse(BaseResponse):
domain_endpoint_options = params.get("DomainEndpointOptions")
advanced_security_options = params.get("AdvancedSecurityOptions")
auto_tune_options = params.get("AutoTuneOptions")
tag_list = params.get("TagList")
domain_status = self.es_backend.create_elasticsearch_domain(
domain_name=domain_name,
elasticsearch_version=elasticsearch_version,
@ -88,7 +87,6 @@ class ElasticsearchServiceResponse(BaseResponse):
domain_endpoint_options=domain_endpoint_options,
advanced_security_options=advanced_security_options,
auto_tune_options=auto_tune_options,
tag_list=tag_list,
)
return 200, {}, json.dumps({"DomainStatus": domain_status})
@ -107,7 +105,5 @@ class ElasticsearchServiceResponse(BaseResponse):
return 200, {}, json.dumps({"DomainStatus": domain_status})
def list_domain_names(self):
params = self._get_params()
engine_type = params.get("EngineType")
domain_names = self.es_backend.list_domain_names(engine_type=engine_type)
domain_names = self.es_backend.list_domain_names()
return 200, {}, json.dumps({"DomainNames": domain_names})

View File

@ -188,7 +188,7 @@ class Rule(CloudFormationModel):
logs_backends[self.region_name].create_log_stream(name, log_stream_name)
logs_backends[self.region_name].put_log_events(
name, log_stream_name, log_events, None
name, log_stream_name, log_events
)
def _send_to_events_archive(self, resource_id, event):
@ -223,8 +223,8 @@ class Rule(CloudFormationModel):
)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -342,8 +342,8 @@ class EventBus(CloudFormationModel):
event_backend.delete_event_bus(name=self.name)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn", "Name", "Policy"]
def has_cfn_attr(cls, attr):
return attr in ["Arn", "Name", "Policy"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -544,8 +544,8 @@ class Archive(CloudFormationModel):
event_backend.archives.pop(self.name)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn", "ArchiveName"]
def has_cfn_attr(cls, attr):
return attr in ["Arn", "ArchiveName"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

View File

@ -24,8 +24,8 @@ class EventsHandler(BaseResponse):
self._json_body = {}
return self._json_body
def _get_param(self, param, if_none=None):
return self.request_params.get(param, if_none)
def _get_param(self, param_name, if_none=None):
return self.request_params.get(param_name, if_none)
def _create_response(self, result):
"""

View File

@ -1,5 +1,4 @@
import json
from urllib.parse import urlparse, parse_qs
from moto.core.responses import BaseResponse
from .models import glacier_backends
@ -13,9 +12,9 @@ class GlacierResponse(BaseResponse):
def all_vault_response(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
return self._all_vault_response(request, full_url, headers)
return self._all_vault_response(headers)
def _all_vault_response(self, request, full_url, headers):
def _all_vault_response(self, headers):
vaults = self.glacier_backend.list_vaults()
response = json.dumps(
{"Marker": None, "VaultList": [vault.to_dict() for vault in vaults]}
@ -30,27 +29,25 @@ class GlacierResponse(BaseResponse):
def _vault_response(self, request, full_url, headers):
method = request.method
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
vault_name = vault_from_glacier_url(full_url)
if method == "GET":
return self._vault_response_get(vault_name, querystring, headers)
return self._vault_response_get(vault_name, headers)
elif method == "PUT":
return self._vault_response_put(vault_name, querystring, headers)
return self._vault_response_put(vault_name, headers)
elif method == "DELETE":
return self._vault_response_delete(vault_name, querystring, headers)
return self._vault_response_delete(vault_name, headers)
def _vault_response_get(self, vault_name, querystring, headers):
def _vault_response_get(self, vault_name, headers):
vault = self.glacier_backend.get_vault(vault_name)
headers["content-type"] = "application/json"
return 200, headers, json.dumps(vault.to_dict())
def _vault_response_put(self, vault_name, querystring, headers):
def _vault_response_put(self, vault_name, headers):
self.glacier_backend.create_vault(vault_name)
return 201, headers, ""
def _vault_response_delete(self, vault_name, querystring, headers):
def _vault_response_delete(self, vault_name, headers):
self.glacier_backend.delete_vault(vault_name)
return 204, headers, ""
@ -66,20 +63,16 @@ class GlacierResponse(BaseResponse):
description = ""
if "x-amz-archive-description" in request.headers:
description = request.headers["x-amz-archive-description"]
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
vault_name = full_url.split("/")[-2]
if method == "POST":
return self._vault_archive_response_post(
vault_name, body, description, querystring, headers
vault_name, body, description, headers
)
else:
return 400, headers, "400 Bad Request"
def _vault_archive_response_post(
self, vault_name, body, description, querystring, headers
):
def _vault_archive_response_post(self, vault_name, body, description, headers):
vault = self.glacier_backend.upload_archive(vault_name, body, description)
headers["x-amz-archive-id"] = vault["archive_id"]
headers["x-amz-sha256-tree-hash"] = vault["sha256"]
@ -143,9 +136,9 @@ class GlacierResponse(BaseResponse):
def vault_jobs_individual_response(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
return self._vault_jobs_individual_response(request, full_url, headers)
return self._vault_jobs_individual_response(full_url, headers)
def _vault_jobs_individual_response(self, request, full_url, headers):
def _vault_jobs_individual_response(self, full_url, headers):
vault_name = full_url.split("/")[-3]
archive_id = full_url.split("/")[-1]
@ -154,9 +147,9 @@ class GlacierResponse(BaseResponse):
def vault_jobs_output_response(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
return self._vault_jobs_output_response(request, full_url, headers)
return self._vault_jobs_output_response(full_url, headers)
def _vault_jobs_output_response(self, request, full_url, headers):
def _vault_jobs_output_response(self, full_url, headers):
vault_name = full_url.split("/")[-4]
job_id = full_url.split("/")[-2]
output = self.glacier_backend.get_job_output(vault_name, job_id)

View File

@ -16,9 +16,7 @@ class GuardDutyBackend(BaseBackend):
self.__dict__ = {}
self.__init__(region_name)
def create_detector(
self, enable, client_token, finding_publishing_frequency, data_sources, tags
):
def create_detector(self, enable, finding_publishing_frequency, data_sources, tags):
if finding_publishing_frequency not in [
"FIFTEEN_MINUTES",
"ONE_HOUR",

View File

@ -22,13 +22,12 @@ class GuardDutyResponse(BaseResponse):
def create_detector(self):
enable = self._get_param("enable")
client_token = self._get_param("clientToken")
finding_publishing_frequency = self._get_param("findingPublishingFrequency")
data_sources = self._get_param("dataSources")
tags = self._get_param("tags")
detector_id = self.guardduty_backend.create_detector(
enable, client_token, finding_publishing_frequency, data_sources, tags
enable, finding_publishing_frequency, data_sources, tags
)
return 200, {}, json.dumps(dict(detectorId=detector_id))

View File

@ -335,11 +335,11 @@ class ManagedPolicy(Policy, CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json.get("Properties", {})
policy_document = json.dumps(properties.get("PolicyDocument"))
name = properties.get("ManagedPolicyName", resource_physical_name)
name = properties.get("ManagedPolicyName", resource_name)
description = properties.get("Description")
path = properties.get("Path")
group_names = properties.get("Groups", [])
@ -442,7 +442,7 @@ class InlinePolicy(CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json.get("Properties", {})
policy_document = properties.get("PolicyDocument")
@ -452,7 +452,7 @@ class InlinePolicy(CloudFormationModel):
group_names = properties.get("Groups")
return iam_backend.create_inline_policy(
resource_physical_name,
resource_name,
policy_name,
policy_document,
group_names,
@ -584,14 +584,10 @@ class Role(CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json["Properties"]
role_name = (
properties["RoleName"]
if "RoleName" in properties
else resource_physical_name
)
role_name = properties.get("RoleName", resource_name)
role = iam_backend.create_role(
role_name=role_name,
@ -707,8 +703,8 @@ class Role(CloudFormationModel):
return self.name
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -751,13 +747,13 @@ class InstanceProfile(CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json["Properties"]
role_names = properties["Roles"]
return iam_backend.create_instance_profile(
name=resource_physical_name,
name=resource_name,
path=properties.get("Path", "/"),
role_names=role_names,
)
@ -782,8 +778,8 @@ class InstanceProfile(CloudFormationModel):
return self.name
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -880,8 +876,8 @@ class AccessKey(CloudFormationModel):
return iso_8601_datetime_without_milliseconds(self.last_used)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["SecretAccessKey"]
def has_cfn_attr(cls, attr):
return attr in ["SecretAccessKey"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -900,7 +896,7 @@ class AccessKey(CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json.get("Properties", {})
user_name = properties.get("UserName")
@ -981,8 +977,8 @@ class Group(BaseModel):
return iso_8601_datetime_with_milliseconds(self.create_date)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -1145,8 +1141,8 @@ class User(CloudFormationModel):
self.ssh_public_keys.remove(key)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -1237,11 +1233,11 @@ class User(CloudFormationModel):
@classmethod
def create_from_cloudformation_json(
cls, resource_physical_name, cloudformation_json, region_name, **kwargs
cls, resource_name, cloudformation_json, region_name, **kwargs
):
properties = cloudformation_json.get("Properties", {})
path = properties.get("Path")
user, _ = iam_backend.create_user(resource_physical_name, path)
user, _ = iam_backend.create_user(resource_name, path)
return user
@classmethod
@ -2032,7 +2028,10 @@ class IAMBackend(BaseBackend):
role = self.get_role(role_name)
profile.roles.remove(role)
def get_all_server_certs(self, marker=None):
def list_server_certificates(self):
"""
Pagination is not yet implemented
"""
return self.certificates.values()
def upload_server_certificate(
@ -2080,7 +2079,10 @@ class IAMBackend(BaseBackend):
self.groups[group_name] = group
return group
def get_group(self, group_name, marker=None, max_items=None):
def get_group(self, group_name):
"""
Pagination is not yet implemented
"""
try:
return self.groups[group_name]
except KeyError:
@ -2105,7 +2107,10 @@ class IAMBackend(BaseBackend):
iam_policy_document_validator.validate()
group.put_policy(policy_name, policy_json)
def list_group_policies(self, group_name, marker=None, max_items=None):
def list_group_policies(self, group_name):
"""
Pagination is not yet implemented
"""
group = self.get_group(group_name)
return group.list_policies()
@ -2363,10 +2368,13 @@ class IAMBackend(BaseBackend):
def get_all_access_keys_for_all_users(self):
access_keys_list = []
for user_name in self.users:
access_keys_list += self.get_all_access_keys(user_name)
access_keys_list += self.list_access_keys(user_name)
return access_keys_list
def get_all_access_keys(self, user_name, marker=None, max_items=None):
def list_access_keys(self, user_name):
"""
Pagination is not yet implemented
"""
user = self.get_user(user_name)
keys = user.get_all_access_keys()
return keys
@ -2556,7 +2564,7 @@ class IAMBackend(BaseBackend):
# alias is force updated
self.account_aliases = [alias]
def delete_account_alias(self, alias):
def delete_account_alias(self):
self.account_aliases = []
def get_account_authorization_details(self, policy_filter):
@ -2619,7 +2627,7 @@ class IAMBackend(BaseBackend):
def get_user_from_access_key_id(self, access_key_id):
for user_name, user in self.users.items():
access_keys = self.get_all_access_keys(user_name)
access_keys = self.list_access_keys(user_name)
for access_key in access_keys:
if access_key.access_key_id == access_key_id:
return user

View File

@ -420,8 +420,8 @@ class IamResponse(BaseResponse):
template = self.response_template(UPLOAD_CERT_TEMPLATE)
return template.render(certificate=cert)
def list_server_certificates(self, marker=None):
certs = iam_backend.get_all_server_certs(marker=marker)
def list_server_certificates(self):
certs = iam_backend.list_server_certificates()
template = self.response_template(LIST_SERVER_CERTIFICATES_TEMPLATE)
return template.render(server_certificates=certs)
@ -475,10 +475,7 @@ class IamResponse(BaseResponse):
def list_group_policies(self):
group_name = self._get_param("GroupName")
marker = self._get_param("Marker")
max_items = self._get_param("MaxItems")
policies = iam_backend.list_group_policies(
group_name, marker=marker, max_items=max_items
)
policies = iam_backend.list_group_policies(group_name)
template = self.response_template(LIST_GROUP_POLICIES_TEMPLATE)
return template.render(
name="ListGroupPoliciesResponse", policies=policies, marker=marker
@ -675,7 +672,7 @@ class IamResponse(BaseResponse):
access_key = iam_backend.get_access_key_last_used(access_key_id)
user_name = access_key["user_name"]
keys = iam_backend.get_all_access_keys(user_name)
keys = iam_backend.list_access_keys(user_name)
template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE)
return template.render(user_name=user_name, keys=keys)
@ -830,8 +827,7 @@ class IamResponse(BaseResponse):
return template.render()
def delete_account_alias(self):
alias = self._get_param("AccountAlias")
iam_backend.delete_account_alias(alias)
iam_backend.delete_account_alias()
template = self.response_template(DELETE_ACCOUNT_ALIAS_TEMPLATE)
return template.render()

View File

@ -6,7 +6,9 @@ from moto.core.responses import BaseResponse
class InstanceMetadataResponse(BaseResponse):
def metadata_response(self, request, full_url, headers):
def metadata_response(
self, request, full_url, headers
): # pylint: disable=unused-argument
"""
Mock response for localhost metadata

View File

@ -750,8 +750,10 @@ class IoTBackend(BaseBackend):
self.endpoint = FakeEndpoint(endpoint_type, self.region_name)
return self.endpoint
def delete_thing(self, thing_name, expected_version):
# TODO: handle expected_version
def delete_thing(self, thing_name):
"""
The ExpectedVersion-parameter is not yet implemented
"""
# can raise ResourceNotFoundError
thing = self.describe_thing(thing_name)
@ -781,9 +783,11 @@ class IoTBackend(BaseBackend):
thing_name,
thing_type_name,
attribute_payload,
expected_version,
remove_thing_type,
):
"""
The ExpectedVersion-parameter is not yet implemented
"""
# if attributes payload = {}, nothing
thing = self.describe_thing(thing_name)
thing_type = None
@ -921,10 +925,12 @@ class IoTBackend(BaseBackend):
def register_ca_certificate(
self,
ca_certificate,
verification_certificate,
set_as_active,
registration_config,
):
"""
The VerificationCertificate-parameter is not yet implemented
"""
certificate = FakeCaCertificate(
ca_certificate=ca_certificate,
status="ACTIVE" if set_as_active else "INACTIVE",
@ -1204,7 +1210,10 @@ class IoTBackend(BaseBackend):
self.thing_groups[thing_group.arn] = thing_group
return thing_group.thing_group_name, thing_group.arn, thing_group.thing_group_id
def delete_thing_group(self, thing_group_name, expected_version):
def delete_thing_group(self, thing_group_name):
"""
The ExpectedVersion-parameter is not yet implemented
"""
child_groups = [
thing_group
for _, thing_group in self.thing_groups.items()
@ -1328,9 +1337,9 @@ class IoTBackend(BaseBackend):
return
del thing_group.things[thing.arn]
def list_things_in_thing_group(self, thing_group_name, recursive):
def list_things_in_thing_group(self, thing_group_name):
"""
The recursive-parameter is not yet implemented
Pagination and the recursive-parameter is not yet implemented
"""
thing_group = self.describe_thing_group(thing_group_name)
return thing_group.things.values()
@ -1435,16 +1444,10 @@ class IoTBackend(BaseBackend):
def get_job_document(self, job_id):
return self.jobs[job_id]
def list_jobs(
self,
status,
target_selection,
max_results,
token,
thing_group_name,
thing_group_id,
):
# TODO: implement filters
def list_jobs(self, max_results, token):
"""
The following parameter are not yet implemented: Status, TargetSelection, ThingGroupName, ThingGroupId
"""
all_jobs = [_.to_dict() for _ in self.jobs.values()]
filtered_jobs = all_jobs
@ -1476,9 +1479,10 @@ class IoTBackend(BaseBackend):
return job_execution
def cancel_job_execution(
self, job_id, thing_name, force, expected_version, status_details
):
def cancel_job_execution(self, job_id, thing_name, force):
"""
The parameters ExpectedVersion and StatusDetails are not yet implemented
"""
job_execution = self.job_executions[(job_id, thing_name)]
if job_execution is None:
@ -1607,10 +1611,12 @@ class IoTBackend(BaseBackend):
domain_configuration_name,
domain_name,
server_certificate_arns,
validation_certificate_arn,
authorizer_config,
service_type,
):
"""
The ValidationCertificateArn-parameter is not yet implemented
"""
if domain_configuration_name in self.domain_configurations:
raise ResourceAlreadyExistsException(
"Domain configuration with given name already exists.",

View File

@ -107,10 +107,7 @@ class IoTResponse(BaseResponse):
def delete_thing(self):
thing_name = self._get_param("thingName")
expected_version = self._get_param("expectedVersion")
self.iot_backend.delete_thing(
thing_name=thing_name, expected_version=expected_version
)
self.iot_backend.delete_thing(thing_name=thing_name)
return json.dumps(dict())
def delete_thing_type(self):
@ -130,13 +127,11 @@ class IoTResponse(BaseResponse):
thing_name = self._get_param("thingName")
thing_type_name = self._get_param("thingTypeName")
attribute_payload = self._get_param("attributePayload")
expected_version = self._get_param("expectedVersion")
remove_thing_type = self._get_param("removeThingType")
self.iot_backend.update_thing(
thing_name=thing_name,
thing_type_name=thing_type_name,
attribute_payload=attribute_payload,
expected_version=expected_version,
remove_thing_type=remove_thing_type,
)
return json.dumps(dict())
@ -213,21 +208,11 @@ class IoTResponse(BaseResponse):
return json.dumps({"document": ""})
def list_jobs(self):
status = (self._get_param("status"),)
target_selection = (self._get_param("targetSelection"),)
max_results = self._get_int_param(
"maxResults", 50
) # not the default, but makes testing easier
# not the default, but makes testing easier
max_results = self._get_int_param("maxResults", 50)
previous_next_token = self._get_param("nextToken")
thing_group_name = (self._get_param("thingGroupName"),)
thing_group_id = self._get_param("thingGroupId")
jobs, next_token = self.iot_backend.list_jobs(
status=status,
target_selection=target_selection,
max_results=max_results,
token=previous_next_token,
thing_group_name=thing_group_name,
thing_group_id=thing_group_id,
max_results=max_results, token=previous_next_token
)
return json.dumps(dict(jobs=jobs, nextToken=next_token))
@ -246,15 +231,9 @@ class IoTResponse(BaseResponse):
job_id = self._get_param("jobId")
thing_name = self._get_param("thingName")
force = self._get_bool_param("force")
expected_version = self._get_int_param("expectedVersion")
status_details = self._get_param("statusDetails")
self.iot_backend.cancel_job_execution(
job_id=job_id,
thing_name=thing_name,
force=force,
expected_version=expected_version,
status_details=status_details,
job_id=job_id, thing_name=thing_name, force=force
)
return json.dumps(dict())
@ -366,13 +345,11 @@ class IoTResponse(BaseResponse):
def register_ca_certificate(self):
ca_certificate = self._get_param("caCertificate")
verification_certificate = self._get_param("verificationCertificate")
set_as_active = self._get_bool_param("setAsActive")
registration_config = self._get_param("registrationConfig")
cert = self.iot_backend.register_ca_certificate(
ca_certificate=ca_certificate,
verification_certificate=verification_certificate,
set_as_active=set_as_active,
registration_config=registration_config,
)
@ -621,10 +598,7 @@ class IoTResponse(BaseResponse):
def delete_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
expected_version = self._get_param("expectedVersion")
self.iot_backend.delete_thing_group(
thing_group_name=thing_group_name, expected_version=expected_version
)
self.iot_backend.delete_thing_group(thing_group_name=thing_group_name)
return json.dumps(dict())
def list_thing_groups(self):
@ -684,15 +658,11 @@ class IoTResponse(BaseResponse):
def list_things_in_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
recursive = self._get_param("recursive")
# next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
things = self.iot_backend.list_things_in_thing_group(
thing_group_name=thing_group_name, recursive=recursive
thing_group_name=thing_group_name
)
next_token = None
thing_names = [_.thing_name for _ in things]
# TODO: implement pagination in the future
return json.dumps(dict(things=thing_names, nextToken=next_token))
def list_thing_groups_for_thing(self):
@ -765,7 +735,6 @@ class IoTResponse(BaseResponse):
domain_configuration_name=self._get_param("domainConfigurationName"),
domain_name=self._get_param("domainName"),
server_certificate_arns=self._get_param("serverCertificateArns"),
validation_certificate_arn=self._get_param("validationCertificateArn"),
authorizer_config=self._get_param("authorizerConfig"),
service_type=self._get_param("serviceType"),
)

View File

@ -199,7 +199,7 @@ class IoTDataPlaneBackend(BaseBackend):
thing.thing_shadow = new_shadow
return thing.thing_shadow
def publish(self, topic, qos, payload):
def publish(self, topic, payload):
self.published_payloads.append((topic, payload))

View File

@ -42,6 +42,5 @@ class IoTDataPlaneResponse(BaseResponse):
def publish(self):
topic = self._get_param("target")
qos = self._get_int_param("qos")
self.iotdata_backend.publish(topic=topic, qos=qos, payload=self.body)
self.iotdata_backend.publish(topic=topic, payload=self.body)
return json.dumps(dict())

View File

@ -245,9 +245,7 @@ class Stream(CloudFormationModel):
for index in records:
record = records[index]
self.put_record(
record.partition_key, record.explicit_hash_key, None, record.data
)
self.put_record(record.partition_key, record.explicit_hash_key, record.data)
def merge_shards(self, shard_to_merge, adjacent_shard_to_merge):
shard1 = self.shards[shard_to_merge]
@ -371,9 +369,7 @@ class Stream(CloudFormationModel):
if shard.starting_hash <= key < shard.ending_hash:
return shard
def put_record(
self, partition_key, explicit_hash_key, sequence_number_for_ordering, data
):
def put_record(self, partition_key, explicit_hash_key, data):
shard = self.get_shard_for_key(partition_key, explicit_hash_key)
sequence_number = shard.put_record(partition_key, data, explicit_hash_key)
@ -488,8 +484,8 @@ class Stream(CloudFormationModel):
)
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -594,13 +590,12 @@ class KinesisBackend(BaseBackend):
stream_name,
partition_key,
explicit_hash_key,
sequence_number_for_ordering,
data,
):
stream = self.describe_stream(stream_name)
sequence_number, shard_id = stream.put_record(
partition_key, explicit_hash_key, sequence_number_for_ordering, data
partition_key, explicit_hash_key, data
)
return sequence_number, shard_id
@ -616,7 +611,7 @@ class KinesisBackend(BaseBackend):
data = record.get("Data")
sequence_number, shard_id = stream.put_record(
partition_key, explicit_hash_key, None, data
partition_key, explicit_hash_key, data
)
response["Records"].append(
{"SequenceNumber": sequence_number, "ShardId": shard_id}

View File

@ -98,14 +98,12 @@ class KinesisResponse(BaseResponse):
stream_name = self.parameters.get("StreamName")
partition_key = self.parameters.get("PartitionKey")
explicit_hash_key = self.parameters.get("ExplicitHashKey")
sequence_number_for_ordering = self.parameters.get("SequenceNumberForOrdering")
data = self.parameters.get("Data")
sequence_number, shard_id = self.kinesis_backend.put_record(
stream_name,
partition_key,
explicit_hash_key,
sequence_number_for_ordering,
data,
)

View File

@ -113,12 +113,18 @@ class KinesisVideoBackend(BaseBackend):
stream_info = stream.to_dict()
return stream_info
def list_streams(self, max_results, next_token, stream_name_condition):
def list_streams(self):
"""
Pagination and the StreamNameCondition-parameter are not yet implemented
"""
stream_info_list = [_.to_dict() for _ in self.streams.values()]
next_token = None
return stream_info_list, next_token
def delete_stream(self, stream_arn, current_version):
def delete_stream(self, stream_arn):
"""
The CurrentVersion-parameter is not yet implemented
"""
stream = self.streams.get(stream_arn)
if stream is None:
raise ResourceNotFoundException()

View File

@ -36,22 +36,12 @@ class KinesisVideoResponse(BaseResponse):
return json.dumps(dict(StreamInfo=stream_info))
def list_streams(self):
max_results = self._get_int_param("MaxResults")
next_token = self._get_param("NextToken")
stream_name_condition = self._get_param("StreamNameCondition")
stream_info_list, next_token = self.kinesisvideo_backend.list_streams(
max_results=max_results,
next_token=next_token,
stream_name_condition=stream_name_condition,
)
stream_info_list, next_token = self.kinesisvideo_backend.list_streams()
return json.dumps(dict(StreamInfoList=stream_info_list, NextToken=next_token))
def delete_stream(self):
stream_arn = self._get_param("StreamARN")
current_version = self._get_param("CurrentVersion")
self.kinesisvideo_backend.delete_stream(
stream_arn=stream_arn, current_version=current_version
)
self.kinesisvideo_backend.delete_stream(stream_arn=stream_arn)
return json.dumps(dict())
def get_data_endpoint(self):

View File

@ -28,40 +28,19 @@ class KinesisVideoArchivedMediaBackend(BaseBackend):
url = "{}{}?SessionToken={}".format(data_endpoint, relative_path, session_token)
return url
def get_hls_streaming_session_url(
self,
stream_name,
stream_arn,
playback_mode,
hls_fragment_selector,
container_format,
discontinuity_mode,
display_fragment_timestamp,
expires,
max_media_playlist_fragment_results,
):
def get_hls_streaming_session_url(self, stream_name, stream_arn):
# Ignore option paramters as the format of hls_url does't depends on them
api_name = "GET_HLS_STREAMING_SESSION_URL"
url = self._get_streaming_url(stream_name, stream_arn, api_name)
return url
def get_dash_streaming_session_url(
self,
stream_name,
stream_arn,
playback_mode,
display_fragment_timestamp,
display_fragment_number,
dash_fragment_selector,
expires,
max_manifest_fragment_results,
):
def get_dash_streaming_session_url(self, stream_name, stream_arn):
# Ignore option paramters as the format of hls_url does't depends on them
api_name = "GET_DASH_STREAMING_SESSION_URL"
url = self._get_streaming_url(stream_name, stream_arn, api_name)
return url
def get_clip(self, stream_name, stream_arn, clip_fragment_selector):
def get_clip(self, stream_name, stream_arn):
kinesisvideo_backends[self.region_name]._get_stream(stream_name, stream_arn)
content_type = "video/mp4" # Fixed content_type as it depends on input stream
payload = b"sample-mp4-video"

View File

@ -13,26 +13,9 @@ class KinesisVideoArchivedMediaResponse(BaseResponse):
def get_hls_streaming_session_url(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
playback_mode = self._get_param("PlaybackMode")
hls_fragment_selector = self._get_param("HLSFragmentSelector")
container_format = self._get_param("ContainerFormat")
discontinuity_mode = self._get_param("DiscontinuityMode")
display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
expires = self._get_int_param("Expires")
max_media_playlist_fragment_results = self._get_param(
"MaxMediaPlaylistFragmentResults"
)
hls_streaming_session_url = (
self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url(
stream_name=stream_name,
stream_arn=stream_arn,
playback_mode=playback_mode,
hls_fragment_selector=hls_fragment_selector,
container_format=container_format,
discontinuity_mode=discontinuity_mode,
display_fragment_timestamp=display_fragment_timestamp,
expires=expires,
max_media_playlist_fragment_results=max_media_playlist_fragment_results,
stream_name=stream_name, stream_arn=stream_arn
)
)
return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url))
@ -40,22 +23,9 @@ class KinesisVideoArchivedMediaResponse(BaseResponse):
def get_dash_streaming_session_url(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
playback_mode = self._get_param("PlaybackMode")
display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
display_fragment_number = self._get_param("DisplayFragmentNumber")
dash_fragment_selector = self._get_param("DASHFragmentSelector")
expires = self._get_int_param("Expires")
max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults")
dash_streaming_session_url = (
self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url(
stream_name=stream_name,
stream_arn=stream_arn,
playback_mode=playback_mode,
display_fragment_timestamp=display_fragment_timestamp,
display_fragment_number=display_fragment_number,
dash_fragment_selector=dash_fragment_selector,
expires=expires,
max_manifest_fragment_results=max_manifest_fragment_results,
stream_name=stream_name, stream_arn=stream_arn
)
)
return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url))
@ -63,11 +33,8 @@ class KinesisVideoArchivedMediaResponse(BaseResponse):
def get_clip(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
clip_fragment_selector = self._get_param("ClipFragmentSelector")
content_type, payload = self.kinesisvideoarchivedmedia_backend.get_clip(
stream_name=stream_name,
stream_arn=stream_arn,
clip_fragment_selector=clip_fragment_selector,
stream_name=stream_name, stream_arn=stream_arn
)
new_headers = {"Content-Type": content_type}
return payload, new_headers

View File

@ -148,8 +148,8 @@ class Key(CloudFormationModel):
return key
@classmethod
def has_cfn_attr(cls, attribute):
return attribute in ["Arn"]
def has_cfn_attr(cls, attr):
return attr in ["Arn"]
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -361,9 +361,7 @@ class KmsBackend(BaseBackend):
)
return new_ciphertext_blob, decrypting_arn, encrypting_arn
def generate_data_key(
self, key_id, encryption_context, number_of_bytes, key_spec, grant_tokens
):
def generate_data_key(self, key_id, encryption_context, number_of_bytes, key_spec):
key_id = self.any_id_to_key_id(key_id)
if key_spec:

View File

@ -462,7 +462,6 @@ class KmsResponse(BaseResponse):
encryption_context = self.parameters.get("EncryptionContext", {})
number_of_bytes = self.parameters.get("NumberOfBytes")
key_spec = self.parameters.get("KeySpec")
grant_tokens = self.parameters.get("GrantTokens")
# Param validation
self._validate_key_id(key_id)
@ -499,7 +498,6 @@ class KmsResponse(BaseResponse):
encryption_context=encryption_context,
number_of_bytes=number_of_bytes,
key_spec=key_spec,
grant_tokens=grant_tokens,
)
plaintext_response = base64.b64encode(plaintext).decode("utf-8")

View File

@ -105,9 +105,7 @@ class LogStream(BaseModel):
res.update(rest)
return res
def put_log_events(
self, log_group_name, log_stream_name, log_events, sequence_token
):
def put_log_events(self, log_group_name, log_stream_name, log_events):
# TODO: ensure sequence_token
# TODO: to be thread safe this would need a lock
self.last_ingestion_time = int(unix_time_millis())
@ -160,8 +158,6 @@ class LogStream(BaseModel):
def get_log_events(
self,
log_group_name,
log_stream_name,
start_time,
end_time,
limit,
@ -238,17 +234,7 @@ class LogStream(BaseModel):
"f/{:056d}".format(end_index),
)
def filter_log_events(
self,
log_group_name,
log_stream_names,
start_time,
end_time,
limit,
next_token,
filter_pattern,
interleaved,
):
def filter_log_events(self, start_time, end_time, filter_pattern):
if filter_pattern:
raise NotImplementedError("filter_pattern is not yet implemented")
@ -375,19 +361,14 @@ class LogGroup(CloudFormationModel):
return log_streams_page, new_token
def put_log_events(
self, log_group_name, log_stream_name, log_events, sequence_token
):
def put_log_events(self, log_group_name, log_stream_name, log_events):
if log_stream_name not in self.streams:
raise ResourceNotFoundException("The specified log stream does not exist.")
stream = self.streams[log_stream_name]
return stream.put_log_events(
log_group_name, log_stream_name, log_events, sequence_token
)
return stream.put_log_events(log_group_name, log_stream_name, log_events)
def get_log_events(
self,
log_group_name,
log_stream_name,
start_time,
end_time,
@ -399,8 +380,6 @@ class LogGroup(CloudFormationModel):
raise ResourceNotFoundException()
stream = self.streams[log_stream_name]
return stream.get_log_events(
log_group_name,
log_stream_name,
start_time,
end_time,
limit,
@ -429,16 +408,7 @@ class LogGroup(CloudFormationModel):
events = []
for stream in streams:
events += stream.filter_log_events(
log_group_name,
log_stream_names,
start_time,
end_time,
limit,
next_token,
filter_pattern,
interleaved,
)
events += stream.filter_log_events(start_time, end_time, filter_pattern)
if interleaved:
events = sorted(events, key=lambda event: event["timestamp"])
@ -728,10 +698,10 @@ class LogsBackend(BaseBackend):
order_by=order_by,
)
def put_log_events(
self, log_group_name, log_stream_name, log_events, sequence_token
):
# TODO: add support for sequence_tokens
def put_log_events(self, log_group_name, log_stream_name, log_events):
"""
The SequenceToken-parameter is not yet implemented
"""
if log_group_name not in self.groups:
raise ResourceNotFoundException()
log_group = self.groups[log_group_name]
@ -756,7 +726,7 @@ class LogsBackend(BaseBackend):
last_timestamp = event["timestamp"]
token = log_group.put_log_events(
log_group_name, log_stream_name, allowed_events, sequence_token
log_group_name, log_stream_name, allowed_events
)
return token, rejected_info
@ -780,13 +750,7 @@ class LogsBackend(BaseBackend):
)
log_group = self.groups[log_group_name]
return log_group.get_log_events(
log_group_name,
log_stream_name,
start_time,
end_time,
limit,
next_token,
start_from_head,
log_stream_name, start_time, end_time, limit, next_token, start_from_head
)
def filter_log_events(
@ -977,17 +941,7 @@ class LogsBackend(BaseBackend):
self.queries[query_id] = LogQuery(query_id, start_time, end_time, query_string)
return query_id
def create_export_task(
self,
*,
task_name,
log_group_name,
log_stream_name_prefix,
fromTime,
to,
destination,
destination_prefix,
):
def create_export_task(self, log_group_name, destination):
s3_backend.get_bucket(destination)
if log_group_name not in self.groups:
raise ResourceNotFoundException()

View File

@ -41,8 +41,8 @@ class LogsResponse(BaseResponse):
except ValueError:
return {}
def _get_param(self, param, if_none=None):
return self.request_params.get(param, if_none)
def _get_param(self, param_name, if_none=None):
return self.request_params.get(param_name, if_none)
def _get_validated_param(
self, param, constraint, constraint_expression, pattern=None
@ -212,10 +212,9 @@ class LogsResponse(BaseResponse):
log_group_name = self._get_param("logGroupName")
log_stream_name = self._get_param("logStreamName")
log_events = self._get_param("logEvents")
sequence_token = self._get_param("sequenceToken")
next_sequence_token, rejected_info = self.logs_backend.put_log_events(
log_group_name, log_stream_name, log_events, sequence_token
log_group_name, log_stream_name, log_events
)
if rejected_info:
return json.dumps(
@ -381,21 +380,9 @@ class LogsResponse(BaseResponse):
return json.dumps({"queryId": "{0}".format(query_id)})
def create_export_task(self):
task_name = self._get_param("taskName")
log_group_name = self._get_param("logGroupName")
log_group_name = self._get_param("logGroupName")
log_stream_name_prefix = self._get_param("logStreamNamePrefix")
fromTime = self._get_param("from")
to = self._get_param("to")
destination = self._get_param("destination")
destination_prefix = self._get_param("destinationPrefix")
task_id = self.logs_backend.create_export_task(
task_name=task_name,
log_group_name=log_group_name,
log_stream_name_prefix=log_stream_name_prefix,
fromTime=fromTime,
to=to,
destination=destination,
destination_prefix=destination_prefix,
log_group_name=log_group_name, destination=destination
)
return json.dumps(dict(taskId=str(task_id)))

View File

@ -17,12 +17,12 @@ def exception_handler(f):
class ManagedBlockchainClientError(HTTPException):
code = 400
def __init__(self, error_type, message, **kwargs):
def __init__(self, error_type, message):
self.error_type = error_type
self.message = message
self.description = json.dumps({"message": self.message})
def get_headers(self, *args, **kwargs):
def get_headers(self, *args, **kwargs): # pylint: disable=unused-argument
return [
("Content-Type", "application/json"),
("x-amzn-ErrorType", self.error_type),
@ -32,7 +32,7 @@ class ManagedBlockchainClientError(HTTPException):
def response(self):
return self.get_body()
def get_body(self, *args, **kwargs):
def get_body(self, *args, **kwargs): # pylint: disable=unused-argument
return self.description

View File

@ -26,23 +26,21 @@ class ManagedBlockchainResponse(BaseResponse):
response_instance = ManagedBlockchainResponse(
managedblockchain_backends[region_name]
)
return response_instance._network_response(request, full_url, headers)
return response_instance._network_response(request, headers)
def _network_response(self, request, full_url, headers):
def _network_response(self, request, headers):
method = request.method
if hasattr(request, "body"):
body = request.body
else:
body = request.data
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
if method == "GET":
return self._all_networks_response(request, full_url, headers)
return self._all_networks_response(headers)
elif method == "POST":
json_body = json.loads(body.decode("utf-8"))
return self._network_response_post(json_body, querystring, headers)
return self._network_response_post(json_body, headers)
def _all_networks_response(self, request, full_url, headers):
def _all_networks_response(self, headers):
mbcnetworks = self.backend.list_networks()
response = json.dumps(
{"Networks": [mbcnetwork.to_dict() for mbcnetwork in mbcnetworks]}
@ -50,7 +48,7 @@ class ManagedBlockchainResponse(BaseResponse):
headers["content-type"] = "application/json"
return 200, headers, response
def _network_response_post(self, json_body, querystring, headers):
def _network_response_post(self, json_body, headers):
name = json_body["Name"]
framework = json_body["Framework"]
frameworkversion = json_body["FrameworkVersion"]
@ -109,16 +107,12 @@ class ManagedBlockchainResponse(BaseResponse):
body = request.body
else:
body = request.data
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
network_id = networkid_from_managedblockchain_url(full_url)
if method == "GET":
return self._all_proposals_response(network_id, headers)
elif method == "POST":
json_body = json.loads(body.decode("utf-8"))
return self._proposal_response_post(
network_id, json_body, querystring, headers
)
return self._proposal_response_post(network_id, json_body, headers)
def _all_proposals_response(self, network_id, headers):
proposals = self.backend.list_proposals(network_id)
@ -128,7 +122,7 @@ class ManagedBlockchainResponse(BaseResponse):
headers["content-type"] = "application/json"
return 200, headers, response
def _proposal_response_post(self, network_id, json_body, querystring, headers):
def _proposal_response_post(self, network_id, json_body, headers):
memberid = json_body["MemberId"]
actions = json_body["Actions"]
@ -177,8 +171,6 @@ class ManagedBlockchainResponse(BaseResponse):
body = request.body
else:
body = request.data
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
network_id = networkid_from_managedblockchain_url(full_url)
proposal_id = proposalid_from_managedblockchain_url(full_url)
if method == "GET":
@ -186,7 +178,7 @@ class ManagedBlockchainResponse(BaseResponse):
elif method == "POST":
json_body = json.loads(body.decode("utf-8"))
return self._proposal_votes_response_post(
network_id, proposal_id, json_body, querystring, headers
network_id, proposal_id, json_body, headers
)
def _all_proposal_votes_response(self, network_id, proposal_id, headers):
@ -196,7 +188,7 @@ class ManagedBlockchainResponse(BaseResponse):
return 200, headers, response
def _proposal_votes_response_post(
self, network_id, proposal_id, json_body, querystring, headers
self, network_id, proposal_id, json_body, headers
):
votermemberid = json_body["VoterMemberId"]
vote = json_body["Vote"]
@ -211,14 +203,14 @@ class ManagedBlockchainResponse(BaseResponse):
response_instance = ManagedBlockchainResponse(
managedblockchain_backends[region_name]
)
return response_instance._invitation_response(request, full_url, headers)
return response_instance._invitation_response(request, headers)
def _invitation_response(self, request, full_url, headers):
def _invitation_response(self, request, headers):
method = request.method
if method == "GET":
return self._all_invitation_response(request, full_url, headers)
return self._all_invitation_response(headers)
def _all_invitation_response(self, request, full_url, headers):
def _all_invitation_response(self, headers):
invitations = self.backend.list_invitations()
response = json.dumps(
{"Invitations": [invitation.to_dict() for invitation in invitations]}
@ -261,16 +253,12 @@ class ManagedBlockchainResponse(BaseResponse):
body = request.body
else:
body = request.data
parsed_url = urlparse(full_url)
querystring = parse_qs(parsed_url.query, keep_blank_values=True)
network_id = networkid_from_managedblockchain_url(full_url)
if method == "GET":
return self._all_members_response(network_id, headers)
elif method == "POST":
json_body = json.loads(body.decode("utf-8"))
return self._member_response_post(
network_id, json_body, querystring, headers
)
return self._member_response_post(network_id, json_body, headers)
def _all_members_response(self, network_id, headers):
members = self.backend.list_members(network_id)
@ -278,7 +266,7 @@ class ManagedBlockchainResponse(BaseResponse):
headers["content-type"] = "application/json"
return 200, headers, response
def _member_response_post(self, network_id, json_body, querystring, headers):
def _member_response_post(self, network_id, json_body, headers):
invitationid = json_body["InvitationId"]
member_configuration = json_body["MemberConfiguration"]
@ -356,9 +344,7 @@ class ManagedBlockchainResponse(BaseResponse):
return self._all_nodes_response(network_id, member_id, status, headers)
elif method == "POST":
json_body = json.loads(body.decode("utf-8"))
return self._node_response_post(
network_id, member_id, json_body, querystring, headers
)
return self._node_response_post(network_id, member_id, json_body, headers)
def _all_nodes_response(self, network_id, member_id, status, headers):
nodes = self.backend.list_nodes(network_id, member_id, status)
@ -366,9 +352,7 @@ class ManagedBlockchainResponse(BaseResponse):
headers["content-type"] = "application/json"
return 200, headers, response
def _node_response_post(
self, network_id, member_id, json_body, querystring, headers
):
def _node_response_post(self, network_id, member_id, json_body, headers):
instancetype = json_body["NodeConfiguration"]["InstanceType"]
availabilityzone = json_body["NodeConfiguration"]["AvailabilityZone"]
logpublishingconfiguration = json_body["NodeConfiguration"][

View File

@ -7,7 +7,7 @@ from moto.mediaconnect.exceptions import NotFoundException
class Flow(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.availability_zone = kwargs.get("availability_zone")
self.entitlements = kwargs.get("entitlements", [])
self.name = kwargs.get("name")
@ -59,7 +59,7 @@ class Flow(BaseModel):
class Resource(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.resource_arn = kwargs.get("resource_arn")
self.tags = OrderedDict()

View File

@ -6,7 +6,7 @@ from moto.core.utils import BackendDict
class Input(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.arn = kwargs.get("arn")
self.attached_channels = kwargs.get("attached_channels", [])
self.destinations = kwargs.get("destinations", [])
@ -54,7 +54,7 @@ class Input(BaseModel):
class Channel(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.arn = kwargs.get("arn")
self.cdi_input_specification = kwargs.get("cdi_input_specification")
self.channel_class = kwargs.get("channel_class", "STANDARD")
@ -134,11 +134,12 @@ class MediaLiveBackend(BaseBackend):
input_specification,
log_level,
name,
request_id,
reserved,
role_arn,
tags,
):
"""
The RequestID and Reserved parameters are not yet implemented
"""
channel_id = uuid4().hex
arn = "arn:aws:medialive:channel:{}".format(channel_id)
channel = Channel(
@ -225,13 +226,14 @@ class MediaLiveBackend(BaseBackend):
input_security_groups,
media_connect_flows,
name,
request_id,
role_arn,
sources,
tags,
input_type,
vpc,
):
"""
The VPC and RequestId parameters are not yet implemented
"""
input_id = uuid4().hex
arn = "arn:aws:medialive:input:{}".format(input_id)
a_input = Input(

View File

@ -19,8 +19,6 @@ class MediaLiveResponse(BaseResponse):
input_specification = self._get_param("inputSpecification")
log_level = self._get_param("logLevel")
name = self._get_param("name")
request_id = self._get_param("requestId")
reserved = self._get_param("reserved")
role_arn = self._get_param("roleArn")
tags = self._get_param("tags")
channel = self.medialive_backend.create_channel(
@ -32,8 +30,6 @@ class MediaLiveResponse(BaseResponse):
input_specification=input_specification,
log_level=log_level,
name=name,
request_id=request_id,
reserved=reserved,
role_arn=role_arn,
tags=tags,
)
@ -98,24 +94,20 @@ class MediaLiveResponse(BaseResponse):
input_security_groups = self._get_param("inputSecurityGroups")
media_connect_flows = self._get_param("mediaConnectFlows")
name = self._get_param("name")
request_id = self._get_param("requestId")
role_arn = self._get_param("roleArn")
sources = self._get_param("sources")
tags = self._get_param("tags")
input_type = self._get_param("type")
vpc = self._get_param("vpc")
a_input = self.medialive_backend.create_input(
destinations=destinations,
input_devices=input_devices,
input_security_groups=input_security_groups,
media_connect_flows=media_connect_flows,
name=name,
request_id=request_id,
role_arn=role_arn,
sources=sources,
tags=tags,
input_type=input_type,
vpc=vpc,
)
return json.dumps({"input": a_input.to_dict()})

View File

@ -7,7 +7,7 @@ from .exceptions import ClientError
class Channel(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.arn = kwargs.get("arn")
self.channel_id = kwargs.get("channel_id")
self.description = kwargs.get("description")
@ -27,7 +27,7 @@ class Channel(BaseModel):
class OriginEndpoint(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.arn = kwargs.get("arn")
self.authorization = kwargs.get("authorization")
self.channel_id = kwargs.get("channel_id")

View File

@ -11,7 +11,7 @@ from .exceptions import (
class Container(BaseModel):
def __init__(self, *args, **kwargs):
def __init__(self, **kwargs):
self.arn = kwargs.get("arn")
self.name = kwargs.get("name")
self.endpoint = kwargs.get("endpoint")
@ -74,7 +74,10 @@ class MediaStoreBackend(BaseBackend):
container.status = "ACTIVE"
return container
def list_containers(self, next_token, max_results):
def list_containers(self):
"""
Pagination is not yet implemented
"""
containers = list(self._containers.values())
response_containers = [c.to_dict() for c in containers]
return response_containers, None

View File

@ -28,11 +28,7 @@ class MediaStoreResponse(BaseResponse):
return json.dumps(dict(Container=container.to_dict()))
def list_containers(self):
next_token = self._get_param("NextToken")
max_results = self._get_int_param("MaxResults")
containers, next_token = self.mediastore_backend.list_containers(
next_token=next_token, max_results=max_results
)
containers, next_token = self.mediastore_backend.list_containers()
return json.dumps(dict(dict(Containers=containers), NextToken=next_token))
def list_tags_for_resource(self):

View File

@ -39,15 +39,10 @@ class MediaStoreDataBackend(BaseBackend):
self.__dict__ = {}
self.__init__(region_name)
def put_object(
self,
body,
path,
content_type=None,
cache_control=None,
storage_class="TEMPORAL",
upload_availability="STANDARD",
):
def put_object(self, body, path, storage_class="TEMPORAL"):
"""
The following parameters are not yet implemented: ContentType, CacheControl, UploadAvailability
"""
new_object = Object(
path=path, body=body, etag="etag", storage_class=storage_class
)
@ -61,7 +56,7 @@ class MediaStoreDataBackend(BaseBackend):
del self._objects[path]
return {}
def get_object(self, path, object_range=None):
def get_object(self, path):
"""
The Range-parameter is not yet supported.
"""
@ -71,7 +66,7 @@ class MediaStoreDataBackend(BaseBackend):
raise ClientError(error, "Object with id={} not found".format(path))
return objects_found[0]
def list_items(self, path, max_results=1000, next_token=None):
def list_items(self):
"""
The Path- and MaxResults-parameters are not yet supported.
"""

View File

@ -13,10 +13,7 @@ class MediaStoreDataResponse(BaseResponse):
def get_object(self):
path = self._get_param("Path")
object_range = self._get_param("Range")
result = self.mediastoredata_backend.get_object(
path=path, object_range=object_range
)
result = self.mediastoredata_backend.get_object(path=path)
headers = {"Path": result.path}
return result.body, headers
@ -33,11 +30,5 @@ class MediaStoreDataResponse(BaseResponse):
return json.dumps(result)
def list_items(self):
path = self._get_param("Path")
max_results = self._get_param("MaxResults")
next_token = self._get_param("NextToken")
items = self.mediastoredata_backend.list_items(
path=path, max_results=max_results, next_token=next_token
)
response_items = json.dumps(dict(Items=items))
return response_items
items = self.mediastoredata_backend.list_items()
return json.dumps(dict(Items=items))

View File

@ -368,7 +368,6 @@ class MQBackend(BaseBackend):
auto_minor_version_upgrade,
broker_name,
configuration,
creator_request_id,
deployment_mode,
encryption_options,
engine_type,

View File

@ -95,7 +95,6 @@ class MQResponse(BaseResponse):
auto_minor_version_upgrade = params.get("autoMinorVersionUpgrade")
broker_name = params.get("brokerName")
configuration = params.get("configuration")
creator_request_id = params.get("creatorRequestId")
deployment_mode = params.get("deploymentMode")
encryption_options = params.get("encryptionOptions")
engine_type = params.get("engineType")
@ -115,7 +114,6 @@ class MQResponse(BaseResponse):
auto_minor_version_upgrade=auto_minor_version_upgrade,
broker_name=broker_name,
configuration=configuration,
creator_request_id=creator_request_id,
deployment_mode=deployment_mode,
encryption_options=encryption_options,
engine_type=engine_type,

Some files were not shown because too many files have changed in this diff Show More