diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 02001259e..b1701b6c1 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -23,9 +23,9 @@ You should be able to run `make init` to install the dependencies and then `make
## Linting
-Ensure that the correct version of black is installed (see `requirements-dev.txt`). Different versions of black will return different results.
+Ensure that the correct version of ruff is installed (see `requirements-dev.txt`). Different versions of ruff will return different results.
Run `make lint` to verify whether your code conforms to the guidelines.
-Use `make format` to automatically format your code, if it does not conform to `black`'s rules.
+Use `make format` to automatically format your code, if it does not conform to `ruff`'s rules.
# Maintainers
diff --git a/Makefile b/Makefile
index 6804e76aa..87b20729d 100644
--- a/Makefile
+++ b/Makefile
@@ -20,17 +20,13 @@ init:
lint:
	@echo "Running ruff..."
	ruff check moto tests
-	@echo "Running black... "
-	$(eval black_version := $(shell grep "^black==" requirements-dev.txt | sed "s/black==//"))
-	@echo "(Make sure you have black-$(black_version) installed, as other versions will produce different results)"
-	black --check moto/ tests/
	@echo "Running pylint..."
	pylint -j 0 moto tests
	@echo "Running MyPy..."
	mypy --install-types --non-interactive
format:
-	black moto/ tests/
+	ruff format moto/ tests/
	ruff check --fix moto/ tests/
test-only:
diff --git a/README.md b/README.md
index 01f41ccf4..d28e68160 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@
[![PyPI](https://img.shields.io/pypi/v/moto.svg)](https://pypi.org/project/moto/)
[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/moto.svg)](#)
[![PyPI - Downloads](https://img.shields.io/pypi/dw/moto.svg)](https://pypistats.org/packages/moto)
-[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
[![Financial Contributors](https://opencollective.com/moto/tiers/badge.svg)](https://opencollective.com/moto)
diff --git a/docs/docs/contributing/faq.rst b/docs/docs/contributing/faq.rst
index 912b97b39..bebcd2ab2 100644
--- a/docs/docs/contributing/faq.rst
+++ b/docs/docs/contributing/faq.rst
@@ -13,9 +13,9 @@ FAQ for Developers
When running the linter...
#############################
-Why does black give different results?
+Why does ruff give different results?
****************************************
-Different versions of black produce different results.
+Different versions of ruff produce different results.
The CI system uses the version set in `requirements-dev.txt`.
To ensure that our CI passes, please format the code using the same version.
diff --git a/docs/docs/contributing/installation.rst b/docs/docs/contributing/installation.rst
index d520cabcd..3ef53e59e 100644
--- a/docs/docs/contributing/installation.rst
+++ b/docs/docs/contributing/installation.rst
@@ -46,13 +46,13 @@ To verify all tests pass for a specific service, for example for `s3`, run these
..
code-block:: bash - ruff moto/s3 tests/test_s3 - black --check moto/s3 tests/test_s3 + ruff check moto/s3 tests/test_s3 + ruff format --check moto/s3 tests/test_s3 pylint moto/s3 tests/test_s3 mypy pytest -sv tests/test_s3 -If black fails, you can run the following command to automatically format the offending files: +If ruff fails, you can run the following command to automatically format the offending files: .. code-block:: bash @@ -83,8 +83,8 @@ Then standard development on Moto can proceed, for example: .. code-block:: bash - ruff moto/s3 tests/test_s3 - black --check moto/s3 tests/test_s3 + ruff check moto/s3 tests/test_s3 + ruff format --check moto/s3 tests/test_s3 pylint moto/s3 tests/test_s3 mypy - pytest -sv tests/test_s3 \ No newline at end of file + pytest -sv tests/test_s3 diff --git a/moto/acmpca/exceptions.py b/moto/acmpca/exceptions.py index 2fdcb935f..1107cdb12 100644 --- a/moto/acmpca/exceptions.py +++ b/moto/acmpca/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the acmpca service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/acmpca/models.py b/moto/acmpca/models.py index e46280966..3caf89c71 100644 --- a/moto/acmpca/models.py +++ b/moto/acmpca/models.py @@ -1,4 +1,5 @@ """ACMPCABackend class with methods for supported APIs.""" + import base64 import datetime from typing import Any, Dict, List, Optional, Tuple @@ -135,9 +136,9 @@ class CertificateAuthority(BaseModel): "S3ObjectAcl" not in self.revocation_configuration["CrlConfiguration"] ): - self.revocation_configuration["CrlConfiguration"][ - "S3ObjectAcl" - ] = "PUBLIC_READ" + self.revocation_configuration["CrlConfiguration"]["S3ObjectAcl"] = ( + "PUBLIC_READ" + ) @property def certificate_bytes(self) -> bytes: diff --git a/moto/acmpca/responses.py b/moto/acmpca/responses.py index b26e9568a..d7f3c0f5b 100644 --- a/moto/acmpca/responses.py +++ b/moto/acmpca/responses.py @@ -1,4 +1,5 @@ """Handles incoming acmpca requests, invokes methods, returns responses.""" + import base64 import json diff --git a/moto/acmpca/urls.py b/moto/acmpca/urls.py index e5f6da295..912cd6a2b 100644 --- a/moto/acmpca/urls.py +++ b/moto/acmpca/urls.py @@ -1,4 +1,5 @@ """acmpca base URL and path.""" + from .responses import ACMPCAResponse url_bases = [ diff --git a/moto/amp/responses.py b/moto/amp/responses.py index e0c720f40..b1906b71c 100644 --- a/moto/amp/responses.py +++ b/moto/amp/responses.py @@ -1,4 +1,5 @@ """Handles incoming amp requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote diff --git a/moto/amp/urls.py b/moto/amp/urls.py index 7968707e8..520698ec7 100644 --- a/moto/amp/urls.py +++ b/moto/amp/urls.py @@ -1,4 +1,5 @@ """amp base URL and path.""" + from .responses import PrometheusServiceResponse url_bases = [ diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py index 90d6d49c8..a3ea772e5 100644 --- a/moto/apigateway/models.py +++ b/moto/apigateway/models.py @@ -453,7 +453,8 @@ class Resource(CloudFormationModel): integration_type = integration.integration_type # type: ignore[union-attr] status, result = self.integration_parsers[integration_type].invoke( - request, integration # type: ignore[arg-type] + request, + integration, # type: ignore[arg-type] ) return status, result @@ -704,12 +705,12 @@ class Stage(BaseModel): updated_key = self._method_settings_translations(key) if updated_key is not None: if resource_path_and_method not in self.method_settings: - self.method_settings[ - resource_path_and_method - ] = 
self._get_default_method_settings() - self.method_settings[resource_path_and_method][ - updated_key - ] = self._convert_to_type(updated_key, value) + self.method_settings[resource_path_and_method] = ( + self._get_default_method_settings() + ) + self.method_settings[resource_path_and_method][updated_key] = ( + self._convert_to_type(updated_key, value) + ) def _get_default_method_settings(self) -> Dict[str, Any]: return { @@ -1004,7 +1005,6 @@ class VpcLink(BaseModel): class RestAPI(CloudFormationModel): - PROP_ID = "id" PROP_NAME = "name" PROP_DESCRIPTION = "description" @@ -1462,7 +1462,6 @@ class Model(BaseModel): class BasePathMapping(BaseModel): - # operations OPERATION_REPLACE = "replace" OPERATION_PATH = "path" @@ -1681,9 +1680,7 @@ class APIGatewayBackend(BaseBackend): api.resources = {} api.default = api.add_child("/") # Add default child - for (path, resource_doc) in sorted( - api_doc["paths"].items(), key=lambda x: x[0] - ): + for path, resource_doc in sorted(api_doc["paths"].items(), key=lambda x: x[0]): # We may want to create a path like /store/inventory # Ensure that /store exists first, so we can use it as a parent ancestors = path.split("/")[ @@ -1715,12 +1712,12 @@ class APIGatewayBackend(BaseBackend): path_part=path[path.rfind("/") + 1 :], ) - for (method_type, method_doc) in resource_doc.items(): + for method_type, method_doc in resource_doc.items(): method_type = method_type.upper() if method_doc.get("x-amazon-apigateway-integration") is None: self.put_method(function_id, resource.id, method_type, None) method_responses = method_doc.get("responses", {}).items() - for (response_code, _) in method_responses: + for response_code, _ in method_responses: self.put_method_response( function_id, resource.id, @@ -2375,7 +2372,6 @@ class APIGatewayBackend(BaseBackend): def update_base_path_mapping( self, domain_name: str, base_path: str, patch_operations: Any ) -> BasePathMapping: - if domain_name not in self.domain_names: raise DomainNameNotFound() diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index 03fc0a335..353a326e5 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -39,7 +39,9 @@ class APIGatewayResponse(BaseResponse): ).format(api_key_source=api_key_source), ) - def __validate_endpoint_configuration(self, endpoint_configuration: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def __validate_endpoint_configuration( + self, endpoint_configuration: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] if endpoint_configuration and "types" in endpoint_configuration: invalid_types = list( set(endpoint_configuration["types"]) - set(ENDPOINT_CONFIGURATION_TYPES) @@ -55,7 +57,9 @@ class APIGatewayResponse(BaseResponse): ).format(endpoint_type=invalid_types[0]), ) - def restapis(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def restapis( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -101,7 +105,9 @@ class APIGatewayResponse(BaseResponse): return 200, {}, json.dumps(rest_api.to_dict()) - def __validte_rest_patch_operations(self, patch_operations: List[Dict[str, str]]) -> TYPE_RESPONSE: # type: ignore[return] + def __validte_rest_patch_operations( + self, patch_operations: List[Dict[str, str]] + ) -> TYPE_RESPONSE: # type: ignore[return] for op in patch_operations: path = op["path"] if "apiKeySource" in path: @@ -136,7 +142,9 
@@ class APIGatewayResponse(BaseResponse): return 200, {}, json.dumps(rest_api.to_dict()) - def resources(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def resources( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) function_id = self.path.replace("/restapis/", "", 1).split("/")[0] @@ -148,7 +156,9 @@ class APIGatewayResponse(BaseResponse): json.dumps({"item": [resource.to_dict() for resource in resources]}), ) - def gateway_response(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def gateway_response( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "PUT": return self.put_gateway_response() @@ -157,12 +167,16 @@ class APIGatewayResponse(BaseResponse): elif request.method == "DELETE": return self.delete_gateway_response() - def gateway_responses(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def gateway_responses( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self.get_gateway_responses() - def resource_individual(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def resource_individual( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) function_id = self.path.replace("/restapis/", "", 1).split("/")[0] resource_id = self.path.split("/")[-1] @@ -254,7 +268,9 @@ class APIGatewayResponse(BaseResponse): return 204, {}, json.dumps(method_response.to_json()) # type: ignore[union-attr] raise Exception(f'Unexpected HTTP method "{self.method}"') - def restapis_authorizers(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def restapis_authorizers( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") restapi_id = url_path_parts[2] @@ -304,7 +320,9 @@ class APIGatewayResponse(BaseResponse): authorizers = self.backend.get_authorizers(restapi_id) return 200, {}, json.dumps({"item": [a.to_json() for a in authorizers]}) - def request_validators(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def request_validators( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") restapi_id = url_path_parts[2] @@ -324,7 +342,9 @@ class APIGatewayResponse(BaseResponse): ) return 201, {}, json.dumps(validator.to_dict()) - def request_validator_individual(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def request_validator_individual( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") restapi_id = url_path_parts[2] @@ -343,7 +363,9 @@ class APIGatewayResponse(BaseResponse): ) return 
200, {}, json.dumps(validator.to_dict()) - def authorizers(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def authorizers( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") restapi_id = url_path_parts[2] @@ -362,7 +384,9 @@ class APIGatewayResponse(BaseResponse): self.backend.delete_authorizer(restapi_id, authorizer_id) return 202, {}, "{}" - def restapis_stages(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def restapis_stages( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[2] @@ -393,7 +417,9 @@ class APIGatewayResponse(BaseResponse): stages = self.backend.get_stages(function_id) return 200, {}, json.dumps({"item": [s.to_json() for s in stages]}) - def restapis_stages_tags(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def restapis_stages_tags( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[4] @@ -411,7 +437,9 @@ class APIGatewayResponse(BaseResponse): stage.tags.pop(tag, None) # type: ignore[union-attr] return 200, {}, json.dumps({"item": ""}) - def stages(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def stages( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[2] @@ -448,7 +476,9 @@ class APIGatewayResponse(BaseResponse): } return 200, headers, json.dumps(body).encode("utf-8") - def integrations(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def integrations( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[2] @@ -504,7 +534,9 @@ class APIGatewayResponse(BaseResponse): ) return 204, {}, json.dumps(integration_response.to_json()) - def integration_responses(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def integration_responses( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[2] @@ -542,7 +574,9 @@ class APIGatewayResponse(BaseResponse): ) return 204, {}, json.dumps(integration_response.to_json()) - def deployments(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def deployments( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) function_id = self.path.replace("/restapis/", "", 1).split("/")[0] @@ -558,7 +592,9 @@ class APIGatewayResponse(BaseResponse): ) return 201, {}, json.dumps(deployment.to_json()) - def 
individual_deployment(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def individual_deployment( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") function_id = url_path_parts[2] @@ -571,7 +607,9 @@ class APIGatewayResponse(BaseResponse): deployment = self.backend.delete_deployment(function_id, deployment_id) return 202, {}, json.dumps(deployment.to_json()) - def apikeys(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def apikeys( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": @@ -611,7 +649,9 @@ class APIGatewayResponse(BaseResponse): return 200, {}, json.dumps(apikey_resp) - def usage_plans(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def usage_plans( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": usage_plan_response = self.backend.create_usage_plan(json.loads(self.body)) @@ -625,7 +665,9 @@ class APIGatewayResponse(BaseResponse): json.dumps({"item": [u.to_json() for u in usage_plans_response]}), ) - def usage_plan_individual(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def usage_plan_individual( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") @@ -644,7 +686,9 @@ class APIGatewayResponse(BaseResponse): ) return 200, {}, json.dumps(usage_plan_response.to_json()) - def usage_plan_keys(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def usage_plan_keys( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") @@ -663,7 +707,9 @@ class APIGatewayResponse(BaseResponse): json.dumps({"item": [u.to_json() for u in usage_plans_response]}), ) - def usage_plan_key_individual(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def usage_plan_key_individual( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") @@ -677,7 +723,9 @@ class APIGatewayResponse(BaseResponse): self.backend.delete_usage_plan_key(usage_plan_id, key_id) return 202, {}, "{}" - def domain_names(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def domain_names( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -732,7 +780,9 @@ class APIGatewayResponse(BaseResponse): msg = f'Method "{self.method}" for API GW domain names not implemented' return 404, {}, json.dumps({"error": msg}) - def models(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def models( + self, request: Any, 
full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) rest_api_id = self.path.replace("/restapis/", "", 1).split("/")[0] @@ -767,7 +817,9 @@ class APIGatewayResponse(BaseResponse): return 200, {}, json.dumps(model_info.to_json()) return 200, {}, "{}" - def base_path_mappings(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def base_path_mappings( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") @@ -790,8 +842,9 @@ class APIGatewayResponse(BaseResponse): ) return 201, {}, json.dumps(base_path_mapping_resp.to_json()) - def base_path_mapping_individual(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] - + def base_path_mapping_individual( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") @@ -813,7 +866,9 @@ class APIGatewayResponse(BaseResponse): ) return 200, {}, json.dumps(base_path_mapping.to_json()) - def vpc_link(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def vpc_link( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) url_path_parts = self.path.split("/") vpc_link_id = url_path_parts[-1] @@ -825,7 +880,9 @@ class APIGatewayResponse(BaseResponse): vpc_link = self.backend.get_vpc_link(vpc_link_id=vpc_link_id) return 200, {}, json.dumps(vpc_link.to_json()) - def vpc_links(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE: # type: ignore[return] + def vpc_links( + self, request: Any, full_url: str, headers: Dict[str, str] + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": diff --git a/moto/apigatewaymanagementapi/models.py b/moto/apigatewaymanagementapi/models.py index 374b99b8f..357789d35 100644 --- a/moto/apigatewaymanagementapi/models.py +++ b/moto/apigatewaymanagementapi/models.py @@ -1,4 +1,5 @@ """ApiGatewayManagementApiBackend class with methods for supported APIs.""" + from collections import defaultdict from typing import Any, Dict diff --git a/moto/apigatewaymanagementapi/responses.py b/moto/apigatewaymanagementapi/responses.py index a3873cb2f..093fb81d9 100644 --- a/moto/apigatewaymanagementapi/responses.py +++ b/moto/apigatewaymanagementapi/responses.py @@ -1,4 +1,5 @@ """Handles incoming apigatewaymanagementapi requests, invokes methods, returns responses.""" + import json from typing import Any diff --git a/moto/apigatewaymanagementapi/urls.py b/moto/apigatewaymanagementapi/urls.py index f4a611047..f72047201 100644 --- a/moto/apigatewaymanagementapi/urls.py +++ b/moto/apigatewaymanagementapi/urls.py @@ -1,4 +1,5 @@ """apigatewaymanagementapi base URL and path.""" + from .responses import ApiGatewayManagementApiResponse url_bases = [r"https?://execute-api\.(.+)\.amazonaws\.com"] diff --git a/moto/apigatewayv2/models.py b/moto/apigatewayv2/models.py index 0b396a214..a53b58cc0 100644 --- a/moto/apigatewayv2/models.py +++ b/moto/apigatewayv2/models.py @@ -1,4 +1,5 @@ """ApiGatewayV2Backend class with methods for supported APIs.""" + import hashlib import string from typing import Any, 
Dict, List, Optional, Union diff --git a/moto/apigatewayv2/responses.py b/moto/apigatewayv2/responses.py index 3d76287cb..16a00b37a 100644 --- a/moto/apigatewayv2/responses.py +++ b/moto/apigatewayv2/responses.py @@ -1,4 +1,5 @@ """Handles incoming apigatewayv2 requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote @@ -62,7 +63,9 @@ class ApiGatewayV2Response(BaseResponse): if self.method == "DELETE": return self.delete_cors_configuration() - def route_request_parameter(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def route_request_parameter( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "DELETE": @@ -102,7 +105,9 @@ class ApiGatewayV2Response(BaseResponse): if self.method == "POST": return self.create_integration() - def integration_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def integration_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "DELETE": @@ -112,7 +117,9 @@ class ApiGatewayV2Response(BaseResponse): if self.method == "PATCH": return self.update_integration_response() - def integration_responses(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def integration_responses( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -138,7 +145,9 @@ class ApiGatewayV2Response(BaseResponse): if self.method == "POST": return self.create_route() - def route_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def route_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "DELETE": @@ -146,7 +155,9 @@ class ApiGatewayV2Response(BaseResponse): if self.method == "GET": return self.get_route_response() - def route_responses(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def route_responses( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": diff --git a/moto/apigatewayv2/urls.py b/moto/apigatewayv2/urls.py index 56e807f18..63bc79fb6 100644 --- a/moto/apigatewayv2/urls.py +++ b/moto/apigatewayv2/urls.py @@ -1,4 +1,5 @@ """apigatewayv2 base URL and path.""" + from .responses import ApiGatewayV2Response url_bases = [ diff --git a/moto/appconfig/exceptions.py b/moto/appconfig/exceptions.py index ec0a6967b..aa8666844 100644 --- a/moto/appconfig/exceptions.py +++ b/moto/appconfig/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the appconfig service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/appconfig/models.py b/moto/appconfig/models.py index 50019eb15..7a8a3a03d 100644 --- a/moto/appconfig/models.py +++ b/moto/appconfig/models.py @@ -189,9 +189,9 @@ class AppConfigBackend(BaseBackend): _type=_type, ) self.tag_resource(config_profile.arn, tags) - self.get_application(application_id).config_profiles[ - config_profile.id - ] = config_profile + 
self.get_application(application_id).config_profiles[config_profile.id] = ( + config_profile + ) return config_profile def delete_configuration_profile(self, app_id: str, config_profile_id: str) -> None: diff --git a/moto/appconfig/urls.py b/moto/appconfig/urls.py index b1a9c1662..a9c71903b 100644 --- a/moto/appconfig/urls.py +++ b/moto/appconfig/urls.py @@ -1,4 +1,5 @@ """appconfig base URL and path.""" + from .responses import AppConfigResponse url_bases = [ diff --git a/moto/applicationautoscaling/responses.py b/moto/applicationautoscaling/responses.py index 9733ba8c4..fdf2358d7 100644 --- a/moto/applicationautoscaling/responses.py +++ b/moto/applicationautoscaling/responses.py @@ -218,9 +218,9 @@ def _build_policy(p: FakeApplicationAutoscalingPolicy) -> Dict[str, Any]: if p.policy_type == "StepScaling": response["StepScalingPolicyConfiguration"] = p.step_scaling_policy_configuration elif p.policy_type == "TargetTrackingScaling": - response[ - "TargetTrackingScalingPolicyConfiguration" - ] = p.target_tracking_scaling_policy_configuration + response["TargetTrackingScalingPolicyConfiguration"] = ( + p.target_tracking_scaling_policy_configuration + ) return response diff --git a/moto/appsync/responses.py b/moto/appsync/responses.py index e7318fe57..a3debe02f 100644 --- a/moto/appsync/responses.py +++ b/moto/appsync/responses.py @@ -1,4 +1,5 @@ """Handles incoming appsync requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote @@ -26,7 +27,9 @@ class AppSyncResponse(BaseResponse): if request.method == "GET": return self.list_graphql_apis() - def graph_ql_individual(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def graph_ql_individual( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self.get_graphql_api() @@ -42,14 +45,18 @@ class AppSyncResponse(BaseResponse): if request.method == "GET": return self.list_api_keys() - def schemacreation(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def schemacreation( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "POST": return self.start_schema_creation() if request.method == "GET": return self.get_schema_creation_status() - def api_key_individual(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def api_key_individual( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "DELETE": return self.delete_api_key() diff --git a/moto/appsync/urls.py b/moto/appsync/urls.py index 84116519c..87fcc25e1 100644 --- a/moto/appsync/urls.py +++ b/moto/appsync/urls.py @@ -1,4 +1,5 @@ """appsync base URL and path.""" + from .responses import AppSyncResponse url_bases = [ diff --git a/moto/athena/models.py b/moto/athena/models.py index a7d5e4a4b..38d4449db 100644 --- a/moto/athena/models.py +++ b/moto/athena/models.py @@ -36,7 +36,6 @@ class TaggableResourceMixin: class WorkGroup(TaggableResourceMixin, BaseModel): - resource_type = "workgroup" state = "ENABLED" diff --git a/moto/autoscaling/models.py b/moto/autoscaling/models.py index de47e2160..5d03c85bf 100644 --- a/moto/autoscaling/models.py +++ b/moto/autoscaling/models.py @@ -393,7 
+393,7 @@ class FailedScheduledUpdateGroupActionRequest: def set_string_propagate_at_launch_booleans_on_tags( - tags: List[Dict[str, Any]] + tags: List[Dict[str, Any]], ) -> List[Dict[str, Any]]: bool_to_string = {True: "true", False: "false"} for tag in tags: @@ -883,7 +883,9 @@ class AutoScalingBackend(BaseBackend): self.elbv2_backend: ELBv2Backend = elbv2_backends[self.account_id][region_name] @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "autoscaling" @@ -1195,7 +1197,6 @@ class AutoScalingBackend(BaseBackend): def describe_auto_scaling_groups( self, names: List[str], filters: Optional[List[Dict[str, str]]] = None ) -> List[FakeAutoScalingGroup]: - groups = list(self.autoscaling_groups.values()) if filters: diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index 5df47ebd5..bba211abc 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -1271,7 +1271,6 @@ class EventSourceMapping(CloudFormationModel): return event_source_arn.split(":")[2].lower() def _validate_event_source(self, event_source_arn: str) -> bool: - valid_services = ("dynamodb", "kinesis", "sqs") service = self._get_service_source_from_arn(event_source_arn) return service in valid_services @@ -1418,9 +1417,9 @@ class LambdaStorage(object): def __init__(self, region_name: str, account_id: str): # Format 'func_name' {'versions': []} self._functions: Dict[str, Any] = {} - self._arns: weakref.WeakValueDictionary[ - str, LambdaFunction - ] = weakref.WeakValueDictionary() + self._arns: weakref.WeakValueDictionary[str, LambdaFunction] = ( + weakref.WeakValueDictionary() + ) self.region_name = region_name self.account_id = account_id @@ -1758,9 +1757,9 @@ class LambdaStorage(object): class LayerStorage(object): def __init__(self) -> None: self._layers: Dict[str, Layer] = {} - self._arns: weakref.WeakValueDictionary[ - str, LambdaFunction - ] = weakref.WeakValueDictionary() + self._arns: weakref.WeakValueDictionary[str, LambdaFunction] = ( + weakref.WeakValueDictionary() + ) def _find_layer_by_name_or_arn(self, name_or_arn: str) -> Layer: if name_or_arn in self._layers: diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index c7a12d6a4..17d108a3a 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -89,7 +89,9 @@ class LambdaResponse(BaseResponse): if request.method == "GET": return self._list_layers() - def layers_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def layers_version( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) layer_name = unquote(self.path.split("/")[-3]) layer_version = self.path.split("/")[-1] @@ -98,7 +100,9 @@ class LambdaResponse(BaseResponse): elif request.method == "GET": return self._get_layer_version(layer_name, layer_version) - def layers_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def layers_versions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self._get_layer_versions() @@ 
-186,7 +190,9 @@ class LambdaResponse(BaseResponse): else: raise ValueError("Cannot handle request") - def code_signing_config(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def code_signing_config( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self._get_code_signing_config() @@ -206,7 +212,9 @@ class LambdaResponse(BaseResponse): else: raise ValueError("Cannot handle request") - def function_url_config(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def function_url_config( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] http_method = request.method self.setup_class(request, full_url, headers) @@ -409,7 +417,9 @@ class LambdaResponse(BaseResponse): return 204, {}, "" @staticmethod - def _set_configuration_qualifier(configuration: Dict[str, Any], function_name: str, qualifier: str) -> Dict[str, Any]: # type: ignore[misc] + def _set_configuration_qualifier( + configuration: Dict[str, Any], function_name: str, qualifier: str + ) -> Dict[str, Any]: # type: ignore[misc] # Qualifier may be explicitly passed or part of function name or ARN, extract it here if function_name.startswith("arn:aws"): # Extract from ARN diff --git a/moto/awslambda_simple/models.py b/moto/awslambda_simple/models.py index 1be101c8f..12381a98c 100644 --- a/moto/awslambda_simple/models.py +++ b/moto/awslambda_simple/models.py @@ -23,7 +23,6 @@ class LambdaSimpleBackend(LambdaBackend): headers: Any, response_headers: Any, ) -> Optional[Union[str, bytes]]: - default_result = "Simple Lambda happy path OK" if self.lambda_simple_results_queue: default_result = self.lambda_simple_results_queue.pop(0) diff --git a/moto/backends.py b/moto/backends.py index 9ebedd0e9..7a5d381c0 100644 --- a/moto/backends.py +++ b/moto/backends.py @@ -309,12 +309,10 @@ def _import_backend( return getattr(module, backends_name) -# fmt: off -# This is more or less the dummy-implementation style that's currently in black's preview -# style. It should be live in black v24.0+, at which point we should remove the # fmt: off -# directive. @overload -def get_backend(name: "Literal['acm']") -> "BackendDict[AWSCertificateManagerBackend]": ... +def get_backend( + name: "Literal['acm']", +) -> "BackendDict[AWSCertificateManagerBackend]": ... @overload def get_backend(name: "Literal['acm-pca']") -> "BackendDict[ACMPCABackend]": ... @overload @@ -322,19 +320,27 @@ def get_backend(name: "Literal['amp']") -> "BackendDict[PrometheusServiceBackend @overload def get_backend(name: "Literal['apigateway']") -> "BackendDict[APIGatewayBackend]": ... @overload -def get_backend(name: "Literal['apigatewaymanagementapi']") -> "BackendDict[ApiGatewayManagementApiBackend]": ... +def get_backend( + name: "Literal['apigatewaymanagementapi']", +) -> "BackendDict[ApiGatewayManagementApiBackend]": ... @overload -def get_backend(name: "Literal['apigatewayv2']") -> "BackendDict[ApiGatewayV2Backend]": ... +def get_backend( + name: "Literal['apigatewayv2']", +) -> "BackendDict[ApiGatewayV2Backend]": ... @overload def get_backend(name: "Literal['appconfig']") -> "BackendDict[AppConfigBackend]": ... @overload -def get_backend(name: "Literal['applicationautoscaling']") -> "BackendDict[ApplicationAutoscalingBackend]": ... 
+def get_backend( + name: "Literal['applicationautoscaling']", +) -> "BackendDict[ApplicationAutoscalingBackend]": ... @overload def get_backend(name: "Literal['appsync']") -> "BackendDict[AppSyncBackend]": ... @overload def get_backend(name: "Literal['athena']") -> "BackendDict[AthenaBackend]": ... @overload -def get_backend(name: "Literal['autoscaling']") -> "BackendDict[AutoScalingBackend]": ... +def get_backend( + name: "Literal['autoscaling']", +) -> "BackendDict[AutoScalingBackend]": ... @overload def get_backend(name: "Literal['batch']") -> "BackendDict[BatchBackend]": ... @overload @@ -342,7 +348,9 @@ def get_backend(name: "Literal['budgets']") -> "BackendDict[BudgetsBackend]": .. @overload def get_backend(name: "Literal['ce']") -> "BackendDict[CostExplorerBackend]": ... @overload -def get_backend(name: "Literal['cloudformation']") -> "BackendDict[CloudFormationBackend]": ... +def get_backend( + name: "Literal['cloudformation']", +) -> "BackendDict[CloudFormationBackend]": ... @overload def get_backend(name: "Literal['cloudfront']") -> "BackendDict[CloudFrontBackend]": ... @overload @@ -354,9 +362,13 @@ def get_backend(name: "Literal['codebuild']") -> "BackendDict[CodeBuildBackend]" @overload def get_backend(name: "Literal['codecommit']") -> "BackendDict[CodeCommitBackend]": ... @overload -def get_backend(name: "Literal['codepipeline']") -> "BackendDict[CodePipelineBackend]": ... +def get_backend( + name: "Literal['codepipeline']", +) -> "BackendDict[CodePipelineBackend]": ... @overload -def get_backend(name: "Literal['cognito-identity']") -> "BackendDict[CognitoIdentityBackend]": ... +def get_backend( + name: "Literal['cognito-identity']", +) -> "BackendDict[CognitoIdentityBackend]": ... @overload def get_backend(name: "Literal['cognito-idp']") -> "BackendDict[CognitoIdpBackend]": ... @overload @@ -366,41 +378,57 @@ def get_backend(name: "Literal['config']") -> "BackendDict[ConfigBackend]": ... @overload def get_backend(name: "Literal['databrew']") -> "BackendDict[DataBrewBackend]": ... @overload -def get_backend(name: "Literal['datapipeline']") -> "BackendDict[DataPipelineBackend]": ... +def get_backend( + name: "Literal['datapipeline']", +) -> "BackendDict[DataPipelineBackend]": ... @overload def get_backend(name: "Literal['datasync']") -> "BackendDict[DataSyncBackend]": ... @overload def get_backend(name: "Literal['dax']") -> "BackendDict[DAXBackend]": ... @overload -def get_backend(name: "Literal['dms']") -> "BackendDict[DatabaseMigrationServiceBackend]": ... +def get_backend( + name: "Literal['dms']", +) -> "BackendDict[DatabaseMigrationServiceBackend]": ... @overload def get_backend(name: "Literal['ds']") -> "BackendDict[DirectoryServiceBackend]": ... @overload def get_backend(name: "Literal['dynamodb']") -> "BackendDict[DynamoDBBackend]": ... @overload -def get_backend(name: "Literal['dynamodb_v20111205']") -> "BackendDict[DynamoDBBackend_v20111205]": ... +def get_backend( + name: "Literal['dynamodb_v20111205']", +) -> "BackendDict[DynamoDBBackend_v20111205]": ... @overload -def get_backend(name: "Literal['dynamodbstreams']") -> "BackendDict[DynamoDBStreamsBackend]": ... +def get_backend( + name: "Literal['dynamodbstreams']", +) -> "BackendDict[DynamoDBStreamsBackend]": ... @overload def get_backend(name: "Literal['ebs']") -> "BackendDict[EBSBackend]": ... @overload def get_backend(name: "Literal['ec2']") -> "BackendDict[EC2Backend]": ... @overload -def get_backend(name: "Literal['ec2instanceconnect']") -> "BackendDict[Ec2InstanceConnectBackend]": ... 
+def get_backend( + name: "Literal['ec2instanceconnect']", +) -> "BackendDict[Ec2InstanceConnectBackend]": ... @overload def get_backend(name: "Literal['ecr']") -> "BackendDict[ECRBackend]": ... @overload -def get_backend(name: "Literal['ecs']") -> "BackendDict[EC2ContainerServiceBackend]": ... +def get_backend( + name: "Literal['ecs']", +) -> "BackendDict[EC2ContainerServiceBackend]": ... @overload def get_backend(name: "Literal['efs']") -> "BackendDict[EFSBackend]": ... @overload def get_backend(name: "Literal['eks']") -> "BackendDict[EKSBackend]": ... @overload -def get_backend(name: "Literal['elasticache']") -> "BackendDict[ElastiCacheBackend]": ... +def get_backend( + name: "Literal['elasticache']", +) -> "BackendDict[ElastiCacheBackend]": ... @overload def get_backend(name: "Literal['elasticbeanstalk']") -> "BackendDict[EBBackend]": ... @overload -def get_backend(name: "Literal['elastictranscoder']") -> "BackendDict[ElasticTranscoderBackend]": ... +def get_backend( + name: "Literal['elastictranscoder']", +) -> "BackendDict[ElasticTranscoderBackend]": ... @overload def get_backend(name: "Literal['elb']") -> "BackendDict[ELBBackend]": ... @overload @@ -408,11 +436,17 @@ def get_backend(name: "Literal['elbv2']") -> "BackendDict[ELBv2Backend]": ... @overload def get_backend(name: "Literal['emr']") -> "BackendDict[ElasticMapReduceBackend]": ... @overload -def get_backend(name: "Literal['emr-containers']") -> "BackendDict[EMRContainersBackend]": ... +def get_backend( + name: "Literal['emr-containers']", +) -> "BackendDict[EMRContainersBackend]": ... @overload -def get_backend(name: "Literal['emr-serverless']") -> "BackendDict[EMRServerlessBackend]": ... +def get_backend( + name: "Literal['emr-serverless']", +) -> "BackendDict[EMRServerlessBackend]": ... @overload -def get_backend(name: "Literal['es']") -> "BackendDict[ElasticsearchServiceBackend]": ... +def get_backend( + name: "Literal['es']", +) -> "BackendDict[ElasticsearchServiceBackend]": ... @overload def get_backend(name: "Literal['events']") -> "BackendDict[EventsBackend]": ... @overload @@ -430,11 +464,15 @@ def get_backend(name: "Literal['guardduty']") -> "BackendDict[GuardDutyBackend]" @overload def get_backend(name: "Literal['iam']") -> "BackendDict[IAMBackend]": ... @overload -def get_backend(name: "Literal['identitystore']") -> "BackendDict[IdentityStoreBackend]": ... +def get_backend( + name: "Literal['identitystore']", +) -> "BackendDict[IdentityStoreBackend]": ... @overload def get_backend(name: "Literal['inspector2']") -> "BackendDict[Inspector2Backend]": ... @overload -def get_backend(name: "Literal['instance_metadata']") -> "BackendDict[InstanceMetadataBackend]": ... +def get_backend( + name: "Literal['instance_metadata']", +) -> "BackendDict[InstanceMetadataBackend]": ... @overload def get_backend(name: "Literal['iot']") -> "BackendDict[IoTBackend]": ... @overload @@ -444,31 +482,47 @@ def get_backend(name: "Literal['ivs']") -> "BackendDict[IVSBackend]": ... @overload def get_backend(name: "Literal['kinesis']") -> "BackendDict[KinesisBackend]": ... @overload -def get_backend(name: "Literal['kinesisvideo']") -> "BackendDict[KinesisVideoBackend]": ... +def get_backend( + name: "Literal['kinesisvideo']", +) -> "BackendDict[KinesisVideoBackend]": ... @overload -def get_backend(name: "Literal['kinesis-video-archived-media']") -> "BackendDict[KinesisVideoArchivedMediaBackend]": ... +def get_backend( + name: "Literal['kinesis-video-archived-media']", +) -> "BackendDict[KinesisVideoArchivedMediaBackend]": ... 
@overload def get_backend(name: "Literal['kms']") -> "BackendDict[KmsBackend]": ... @overload -def get_backend(name: "Literal['lakeformation']") -> "BackendDict[LakeFormationBackend]": ... +def get_backend( + name: "Literal['lakeformation']", +) -> "BackendDict[LakeFormationBackend]": ... @overload def get_backend(name: "Literal['lambda']") -> "BackendDict[LambdaBackend]": ... @overload def get_backend(name: "Literal['logs']") -> "BackendDict[LogsBackend]": ... @overload -def get_backend(name: "Literal['managedblockchain']") -> "BackendDict[ManagedBlockchainBackend]": ... +def get_backend( + name: "Literal['managedblockchain']", +) -> "BackendDict[ManagedBlockchainBackend]": ... @overload -def get_backend(name: "Literal['mediaconnect']") -> "BackendDict[MediaConnectBackend]": ... +def get_backend( + name: "Literal['mediaconnect']", +) -> "BackendDict[MediaConnectBackend]": ... @overload def get_backend(name: "Literal['medialive']") -> "BackendDict[MediaLiveBackend]": ... @overload -def get_backend(name: "Literal['mediapackage']") -> "BackendDict[MediaPackageBackend]": ... +def get_backend( + name: "Literal['mediapackage']", +) -> "BackendDict[MediaPackageBackend]": ... @overload def get_backend(name: "Literal['mediastore']") -> "BackendDict[MediaStoreBackend]": ... @overload -def get_backend(name: "Literal['mediastore-data']") -> "BackendDict[MediaStoreDataBackend]": ... +def get_backend( + name: "Literal['mediastore-data']", +) -> "BackendDict[MediaStoreDataBackend]": ... @overload -def get_backend(name: "Literal['meteringmarketplace']") -> "BackendDict[MeteringMarketplaceBackend]": ... +def get_backend( + name: "Literal['meteringmarketplace']", +) -> "BackendDict[MeteringMarketplaceBackend]": ... @overload def get_backend(name: "Literal['moto_api']") -> "BackendDict[MotoAPIBackend]": ... @overload @@ -476,13 +530,19 @@ def get_backend(name: "Literal['mq']") -> "BackendDict[MQBackend]": ... @overload def get_backend(name: "Literal['neptune']") -> "BackendDict[NeptuneBackend]": ... @overload -def get_backend(name: "Literal['opensearch']") -> "BackendDict[OpenSearchServiceBackend]": ... +def get_backend( + name: "Literal['opensearch']", +) -> "BackendDict[OpenSearchServiceBackend]": ... @overload def get_backend(name: "Literal['opsworks']") -> "BackendDict[OpsWorksBackend]": ... @overload -def get_backend(name: "Literal['organizations']") -> "BackendDict[OrganizationsBackend]": ... +def get_backend( + name: "Literal['organizations']", +) -> "BackendDict[OrganizationsBackend]": ... @overload -def get_backend(name: "Literal['personalize']") -> "BackendDict[PersonalizeBackend]": ... +def get_backend( + name: "Literal['personalize']", +) -> "BackendDict[PersonalizeBackend]": ... @overload def get_backend(name: "Literal['pinpoint']") -> "BackendDict[PinpointBackend]": ... @overload @@ -490,31 +550,49 @@ def get_backend(name: "Literal['polly']") -> "BackendDict[PollyBackend]": ... @overload def get_backend(name: "Literal['quicksight']") -> "BackendDict[QuickSightBackend]": ... @overload -def get_backend(name: "Literal['ram']") -> "BackendDict[ResourceAccessManagerBackend]": ... +def get_backend( + name: "Literal['ram']", +) -> "BackendDict[ResourceAccessManagerBackend]": ... @overload def get_backend(name: "Literal['rds']") -> "BackendDict[RDSBackend]": ... @overload -def get_backend(name: "Literal['rds-data']") -> "BackendDict[RDSDataServiceBackend]": ... +def get_backend( + name: "Literal['rds-data']", +) -> "BackendDict[RDSDataServiceBackend]": ... 
@overload def get_backend(name: "Literal['redshift']") -> "BackendDict[RedshiftBackend]": ... @overload -def get_backend(name: "Literal['redshift-data']") -> "BackendDict[RedshiftDataAPIServiceBackend]": ... +def get_backend( + name: "Literal['redshift-data']", +) -> "BackendDict[RedshiftDataAPIServiceBackend]": ... @overload -def get_backend(name: "Literal['rekognition']") -> "BackendDict[RekognitionBackend]": ... +def get_backend( + name: "Literal['rekognition']", +) -> "BackendDict[RekognitionBackend]": ... @overload -def get_backend(name: "Literal['resiliencehub']") -> "BackendDict[ResilienceHubBackend]": ... +def get_backend( + name: "Literal['resiliencehub']", +) -> "BackendDict[ResilienceHubBackend]": ... @overload -def get_backend(name: "Literal['resource-groups']") -> "BackendDict[ResourceGroupsBackend]": ... +def get_backend( + name: "Literal['resource-groups']", +) -> "BackendDict[ResourceGroupsBackend]": ... @overload -def get_backend(name: "Literal['resourcegroupstaggingapi']") -> "BackendDict[ResourceGroupsTaggingAPIBackend]": ... +def get_backend( + name: "Literal['resourcegroupstaggingapi']", +) -> "BackendDict[ResourceGroupsTaggingAPIBackend]": ... @overload def get_backend(name: "Literal['robomaker']") -> "BackendDict[RoboMakerBackend]": ... @overload def get_backend(name: "Literal['route53']") -> "BackendDict[Route53Backend]": ... @overload -def get_backend(name: "Literal['route53resolver']") -> "BackendDict[Route53ResolverBackend]": ... +def get_backend( + name: "Literal['route53resolver']", +) -> "BackendDict[Route53ResolverBackend]": ... @overload -def get_backend(name: "Literal['route53domains']") -> "BackendDict[Route53DomainsBackend]": ... +def get_backend( + name: "Literal['route53domains']", +) -> "BackendDict[Route53DomainsBackend]": ... @overload def get_backend(name: "Literal['s3']") -> "BackendDict[S3Backend]": ... @overload @@ -522,19 +600,31 @@ def get_backend(name: "Literal['s3bucket_path']") -> "BackendDict[S3Backend]": . @overload def get_backend(name: "Literal['s3control']") -> "BackendDict[S3ControlBackend]": ... @overload -def get_backend(name: "Literal['sagemaker']") -> "BackendDict[SageMakerModelBackend]": ... +def get_backend( + name: "Literal['sagemaker']", +) -> "BackendDict[SageMakerModelBackend]": ... @overload -def get_backend(name: "Literal['sagemaker-runtime']") -> "BackendDict[SageMakerRuntimeBackend]": ... +def get_backend( + name: "Literal['sagemaker-runtime']", +) -> "BackendDict[SageMakerRuntimeBackend]": ... @overload -def get_backend(name: "Literal['scheduler']") -> "BackendDict[EventBridgeSchedulerBackend]": ... +def get_backend( + name: "Literal['scheduler']", +) -> "BackendDict[EventBridgeSchedulerBackend]": ... @overload def get_backend(name: "Literal['sdb']") -> "BackendDict[SimpleDBBackend]": ... @overload -def get_backend(name: "Literal['secretsmanager']") -> "BackendDict[SecretsManagerBackend]": ... +def get_backend( + name: "Literal['secretsmanager']", +) -> "BackendDict[SecretsManagerBackend]": ... @overload -def get_backend(name: "Literal['servicediscovery']") -> "BackendDict[ServiceDiscoveryBackend]": ... +def get_backend( + name: "Literal['servicediscovery']", +) -> "BackendDict[ServiceDiscoveryBackend]": ... @overload -def get_backend(name: "Literal['service-quotas']") -> "BackendDict[ServiceQuotasBackend]": ... +def get_backend( + name: "Literal['service-quotas']", +) -> "BackendDict[ServiceQuotasBackend]": ... @overload def get_backend(name: "Literal['ses']") -> "BackendDict[SESBackend]": ... 
@overload @@ -546,11 +636,15 @@ def get_backend(name: "Literal['sns']") -> "BackendDict[SNSBackend]": ... @overload def get_backend(name: "Literal['sqs']") -> "BackendDict[SQSBackend]": ... @overload -def get_backend(name: "Literal['ssm']") -> "BackendDict[SimpleSystemManagerBackend]": ... +def get_backend( + name: "Literal['ssm']", +) -> "BackendDict[SimpleSystemManagerBackend]": ... @overload def get_backend(name: "Literal['sso-admin']") -> "BackendDict[SSOAdminBackend]": ... @overload -def get_backend(name: "Literal['stepfunctions']") -> "BackendDict[StepFunctionBackend]": ... +def get_backend( + name: "Literal['stepfunctions']", +) -> "BackendDict[StepFunctionBackend]": ... @overload def get_backend(name: "Literal['sts']") -> "BackendDict[STSBackend]": ... @overload @@ -560,7 +654,9 @@ def get_backend(name: "Literal['swf']") -> "BackendDict[SWFBackend]": ... @overload def get_backend(name: "Literal['textract']") -> "BackendDict[TextractBackend]": ... @overload -def get_backend(name: "Literal['timestream-write']") -> "BackendDict[TimestreamWriteBackend]": ... +def get_backend( + name: "Literal['timestream-write']", +) -> "BackendDict[TimestreamWriteBackend]": ... @overload def get_backend(name: "Literal['transcribe']") -> "BackendDict[TranscribeBackend]": ... @overload @@ -569,10 +665,10 @@ def get_backend(name: "Literal['wafv2']") -> "BackendDict[WAFV2Backend]": ... def get_backend(name: "Literal['workspaces']") -> "BackendDict[WorkSpacesBackend]": ... @overload def get_backend(name: "Literal['xray']") -> "BackendDict[XRayBackend]": ... -# fmt: on def get_backend(name: SERVICE_NAMES) -> "BackendDict[SERVICE_BACKEND]": + # fmt: on safe_name = name.replace("-", "") return _import_backend( ALT_SERVICE_NAMES.get(safe_name, safe_name), diff --git a/moto/backup/exceptions.py b/moto/backup/exceptions.py index dc0e1ad12..8fb96abff 100644 --- a/moto/backup/exceptions.py +++ b/moto/backup/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the backup service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/backup/models.py b/moto/backup/models.py index 83715ad29..5f8d3011f 100644 --- a/moto/backup/models.py +++ b/moto/backup/models.py @@ -17,7 +17,6 @@ class Plan(BaseModel): creator_request_id: str, backend: "BackupBackend", ): - self.backup_plan_id = str(mock_random.uuid4()) self.backup_plan_arn = f"arn:aws:backup:{backend.region_name}:{backend.account_id}:backup-plan:{self.backup_plan_id}" self.creation_date = unix_time() @@ -137,7 +136,6 @@ class BackupBackend(BaseBackend): backup_plan_tags: Dict[str, str], creator_request_id: str, ) -> Plan: - if backup_plan["BackupPlanName"] in list( p.backup_plan["BackupPlanName"] for p in list(self.plans.values()) ): @@ -197,7 +195,6 @@ class BackupBackend(BaseBackend): encryption_key_arn: str, creator_request_id: str, ) -> Vault: - if backup_vault_name in self.vaults: raise AlreadyExistsException( msg="Backup vault with the same name already exists" diff --git a/moto/backup/responses.py b/moto/backup/responses.py index 42483740b..8610faea4 100644 --- a/moto/backup/responses.py +++ b/moto/backup/responses.py @@ -1,4 +1,5 @@ """Handles incoming backup requests, invokes methods, returns responses.""" + import json from urllib.parse import unquote diff --git a/moto/backup/urls.py b/moto/backup/urls.py index 5b4b7c84b..72bc1f7c8 100644 --- a/moto/backup/urls.py +++ b/moto/backup/urls.py @@ -1,4 +1,5 @@ """backup base URL and path.""" + from .responses import BackupResponse url_bases = [ diff --git a/moto/batch/models.py 
b/moto/batch/models.py index 6acb78eec..59c6eb955 100644 --- a/moto/batch/models.py +++ b/moto/batch/models.py @@ -768,9 +768,9 @@ class Job(threading.Thread, BaseModel, DockerModel, ManagedState): environment = kwargs["environment"] environment["MOTO_HOST"] = settings.moto_server_host() environment["MOTO_PORT"] = settings.moto_server_port() - environment[ - "MOTO_HTTP_ENDPOINT" - ] = f'{environment["MOTO_HOST"]}:{environment["MOTO_PORT"]}' + environment["MOTO_HTTP_ENDPOINT"] = ( + f'{environment["MOTO_HOST"]}:{environment["MOTO_PORT"]}' + ) if network_name: kwargs["network"] = network_name diff --git a/moto/budgets/exceptions.py b/moto/budgets/exceptions.py index 03d073383..9f5551540 100644 --- a/moto/budgets/exceptions.py +++ b/moto/budgets/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the budgets service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/ce/exceptions.py b/moto/ce/exceptions.py index 8ae46d45f..5bfbc0e4e 100644 --- a/moto/ce/exceptions.py +++ b/moto/ce/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the ce service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/ce/responses.py b/moto/ce/responses.py index 9ade12190..7206aec90 100644 --- a/moto/ce/responses.py +++ b/moto/ce/responses.py @@ -1,4 +1,5 @@ """Handles incoming ce requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/ce/urls.py b/moto/ce/urls.py index fd2a44752..12919dd16 100644 --- a/moto/ce/urls.py +++ b/moto/ce/urls.py @@ -1,4 +1,5 @@ """ce base URL and path.""" + from .responses import CostExplorerResponse url_bases = [ diff --git a/moto/cloudformation/models.py b/moto/cloudformation/models.py index 8d48d460e..e9258d85d 100644 --- a/moto/cloudformation/models.py +++ b/moto/cloudformation/models.py @@ -117,7 +117,6 @@ class FakeStackSet(BaseModel): regions: List[str], operation_id: str, ) -> Dict[str, Any]: - self.template = template or self.template self.description = description if description is not None else self.description self.parameters = parameters or self.parameters @@ -447,7 +446,6 @@ class FakeStack(CloudFormationModel): resource_status_reason: Optional[str] = None, resource_properties: Optional[str] = None, ) -> None: - event = FakeEvent( stack_id=self.stack_id, stack_name=self.name, diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py index 0a6b913b6..8dbb9219c 100644 --- a/moto/cloudformation/parsing.py +++ b/moto/cloudformation/parsing.py @@ -334,15 +334,17 @@ def parse_resource( def parse_resource_and_generate_name( logical_id: str, resource_json: Dict[str, Any], resources_map: "ResourceMap" ) -> Tuple[Type[CloudFormationModel], Dict[str, Any], str]: - resource_tuple: Tuple[ - Type[CloudFormationModel], Dict[str, Any], str - ] = parse_resource(resource_json, resources_map) + resource_tuple: Tuple[Type[CloudFormationModel], Dict[str, Any], str] = ( + parse_resource(resource_json, resources_map) + ) if not resource_tuple: return None resource_class, resource_json, resource_type = resource_tuple generated_resource_name = generate_resource_name( - resource_type, resources_map["AWS::StackName"], logical_id # type: ignore[arg-type] + resource_type, + resources_map["AWS::StackName"], + logical_id, # type: ignore[arg-type] ) resource_name_property = resource_name_property_from_type(resource_type) @@ -373,9 +375,9 @@ def parse_and_create_resource( return None resource_type = resource_json["Type"] - resource_tuple: Tuple[ - Type[CloudFormationModel], 
Dict[str, Any], str - ] = parse_resource_and_generate_name(logical_id, resource_json, resources_map) + resource_tuple: Tuple[Type[CloudFormationModel], Dict[str, Any], str] = ( + parse_resource_and_generate_name(logical_id, resource_json, resources_map) + ) if not resource_tuple: return None resource_class, resource_json, resource_physical_name = resource_tuple @@ -399,9 +401,9 @@ def parse_and_update_resource( account_id: str, region_name: str, ) -> Optional[CF_MODEL]: - resource_tuple: Optional[ - Tuple[Type[CloudFormationModel], Dict[str, Any], str] - ] = parse_resource_and_generate_name(logical_id, resource_json, resources_map) + resource_tuple: Optional[Tuple[Type[CloudFormationModel], Dict[str, Any], str]] = ( + parse_resource_and_generate_name(logical_id, resource_json, resources_map) + ) if not resource_tuple: return None resource_class, resource_json, new_resource_name = resource_tuple @@ -436,7 +438,11 @@ def parse_and_delete_resource( ) -def parse_condition(condition: Union[Dict[str, Any], bool], resources_map: "ResourceMap", condition_map: Dict[str, Any]) -> bool: # type: ignore[return] +def parse_condition( + condition: Union[Dict[str, Any], bool], + resources_map: "ResourceMap", + condition_map: Dict[str, Any], +) -> bool: # type: ignore[return] if isinstance(condition, bool): return condition @@ -751,7 +757,6 @@ class ResourceMap(collections_abc.Mapping): # type: ignore[type-arg] return all_resources_ready def build_resource_diff(self, other_template: Dict[str, Any]) -> Dict[str, Any]: - old = self._resource_json_map new = other_template["Resources"] @@ -766,7 +771,6 @@ class ResourceMap(collections_abc.Mapping): # type: ignore[type-arg] def build_change_set_actions( self, template: Dict[str, Any] ) -> Dict[str, Dict[str, Dict[str, str]]]: - resource_names_by_action = self.build_resource_diff(template) resources_by_action: Dict[str, Dict[str, Dict[str, str]]] = { @@ -798,7 +802,6 @@ class ResourceMap(collections_abc.Mapping): # type: ignore[type-arg] def update( self, template: Dict[str, Any], parameters: Optional[Dict[str, Any]] = None ) -> None: - resource_names_by_action = self.build_resource_diff(template) for logical_name in resource_names_by_action["Remove"]: @@ -816,7 +819,6 @@ class ResourceMap(collections_abc.Mapping): # type: ignore[type-arg] self._resource_json_map = template["Resources"] for logical_name in resource_names_by_action["Add"]: - # call __getitem__ to initialize the resource # TODO: usage of indexer to initalize the resource is questionable _ = self[logical_name] @@ -867,7 +869,6 @@ class ResourceMap(collections_abc.Mapping): # type: ignore[type-arg] not isinstance(parsed_resource, str) and parsed_resource is not None ): - resource_json = self._resource_json_map[ parsed_resource.logical_resource_id ] diff --git a/moto/cloudfront/responses.py b/moto/cloudfront/responses.py index 6a1c5c9f7..0966ff63d 100644 --- a/moto/cloudfront/responses.py +++ b/moto/cloudfront/responses.py @@ -40,14 +40,18 @@ class CloudFrontResponse(BaseResponse): if request.method == "GET": return self.list_tags_for_resource() - def origin_access_controls(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def origin_access_controls( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "POST": return self.create_origin_access_control() if request.method == "GET": return self.list_origin_access_controls() - def 
origin_access_control(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def origin_access_control( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self.get_origin_access_control() @@ -82,7 +86,9 @@ class CloudFrontResponse(BaseResponse): response = template.render(distributions=distributions) return 200, {}, response - def individual_distribution(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def individual_distribution( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) distribution_id = full_url.split("/")[-1] if request.method == "DELETE": diff --git a/moto/cloudfront/urls.py b/moto/cloudfront/urls.py index 708b86a60..588bff8e8 100644 --- a/moto/cloudfront/urls.py +++ b/moto/cloudfront/urls.py @@ -1,4 +1,5 @@ """cloudfront base URL and path.""" + from .responses import CloudFrontResponse url_bases = [ diff --git a/moto/cloudtrail/exceptions.py b/moto/cloudtrail/exceptions.py index 305118d70..cc74fe9a7 100644 --- a/moto/cloudtrail/exceptions.py +++ b/moto/cloudtrail/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the cloudtrail service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/cloudtrail/responses.py b/moto/cloudtrail/responses.py index a60c2cd51..dce924fc1 100644 --- a/moto/cloudtrail/responses.py +++ b/moto/cloudtrail/responses.py @@ -1,4 +1,5 @@ """Handles incoming cloudtrail requests, invokes methods, returns responses.""" + import json from typing import Any, Dict diff --git a/moto/cloudtrail/urls.py b/moto/cloudtrail/urls.py index 83e2deca0..7cc0b4e98 100644 --- a/moto/cloudtrail/urls.py +++ b/moto/cloudtrail/urls.py @@ -1,4 +1,5 @@ """cloudtrail base URL and path.""" + from .responses import CloudTrailResponse response = CloudTrailResponse() diff --git a/moto/codebuild/models.py b/moto/codebuild/models.py index 4be175617..695c295d7 100644 --- a/moto/codebuild/models.py +++ b/moto/codebuild/models.py @@ -25,9 +25,9 @@ class CodeBuildProjectMetadata(BaseModel): self.build_metadata: Dict[str, Any] = dict() self.build_metadata["id"] = build_id - self.build_metadata[ - "arn" - ] = f"arn:aws:codebuild:{region_name}:{account_id}:build/{build_id}" + self.build_metadata["arn"] = ( + f"arn:aws:codebuild:{region_name}:{account_id}:build/{build_id}" + ) self.build_metadata["buildNumber"] = mock_random.randint(1, 100) self.build_metadata["startTime"] = current_date @@ -86,9 +86,9 @@ class CodeBuildProjectMetadata(BaseModel): self.build_metadata["queuedTimeoutInMinutes"] = 480 self.build_metadata["buildComplete"] = False self.build_metadata["initiator"] = "rootme" - self.build_metadata[ - "encryptionKey" - ] = f"arn:aws:kms:{region_name}:{account_id}:alias/aws/s3" + self.build_metadata["encryptionKey"] = ( + f"arn:aws:kms:{region_name}:{account_id}:alias/aws/s3" + ) class CodeBuild(BaseModel): @@ -106,21 +106,21 @@ class CodeBuild(BaseModel): self.project_metadata: Dict[str, Any] = dict() self.project_metadata["name"] = project_name - self.project_metadata[ - "arn" - ] = f"arn:aws:codebuild:{region}:{account_id}:project/{project_name}" - self.project_metadata[ - "encryptionKey" - ] = f"arn:aws:kms:{region}:{account_id}:alias/aws/s3" - self.project_metadata[ - "serviceRole" - ] = f"arn:aws:iam::{account_id}:role/service-role/{serviceRole}" + 
self.project_metadata["arn"] = ( + f"arn:aws:codebuild:{region}:{account_id}:project/{project_name}" + ) + self.project_metadata["encryptionKey"] = ( + f"arn:aws:kms:{region}:{account_id}:alias/aws/s3" + ) + self.project_metadata["serviceRole"] = ( + f"arn:aws:iam::{account_id}:role/service-role/{serviceRole}" + ) self.project_metadata["lastModifiedDate"] = current_date self.project_metadata["created"] = current_date self.project_metadata["badge"] = dict() - self.project_metadata["badge"][ - "badgeEnabled" - ] = False # this false needs to be a json false not a python false + self.project_metadata["badge"]["badgeEnabled"] = ( + False # this false needs to be a json false not a python false + ) self.project_metadata["environment"] = environment self.project_metadata["artifacts"] = artifacts self.project_metadata["source"] = project_source @@ -166,7 +166,6 @@ class CodeBuildBackend(BaseBackend): return self.codebuild_projects[project_name].project_metadata def list_projects(self) -> List[str]: - projects = [] for project in self.codebuild_projects.keys(): @@ -180,7 +179,6 @@ class CodeBuildBackend(BaseBackend): source_version: Optional[str] = None, artifact_override: Optional[Dict[str, Any]] = None, ) -> Dict[str, Any]: - build_id = f"{project_name}:{mock_random.uuid4()}" # construct a new build @@ -269,7 +267,6 @@ class CodeBuildBackend(BaseBackend): self.codebuild_projects.pop(project_name, None) def stop_build(self, build_id: str) -> Optional[Dict[str, Any]]: # type: ignore[return] - for metadata in self.build_metadata_history.values(): for build in metadata: if build["id"] == build_id: diff --git a/moto/codecommit/models.py b/moto/codecommit/models.py index 7e1e4e44a..3f87569ef 100644 --- a/moto/codecommit/models.py +++ b/moto/codecommit/models.py @@ -19,19 +19,19 @@ class CodeCommit(BaseModel): current_date = iso_8601_datetime_with_milliseconds() self.repository_metadata = dict() self.repository_metadata["repositoryName"] = repository_name - self.repository_metadata[ - "cloneUrlSsh" - ] = f"ssh://git-codecommit.{region}.amazonaws.com/v1/repos/{repository_name}" - self.repository_metadata[ - "cloneUrlHttp" - ] = f"https://git-codecommit.{region}.amazonaws.com/v1/repos/{repository_name}" + self.repository_metadata["cloneUrlSsh"] = ( + f"ssh://git-codecommit.{region}.amazonaws.com/v1/repos/{repository_name}" + ) + self.repository_metadata["cloneUrlHttp"] = ( + f"https://git-codecommit.{region}.amazonaws.com/v1/repos/{repository_name}" + ) self.repository_metadata["creationDate"] = current_date self.repository_metadata["lastModifiedDate"] = current_date self.repository_metadata["repositoryDescription"] = repository_description self.repository_metadata["repositoryId"] = str(mock_random.uuid4()) - self.repository_metadata[ - "Arn" - ] = f"arn:aws:codecommit:{region}:{account_id}:{repository_name}" + self.repository_metadata["Arn"] = ( + f"arn:aws:codecommit:{region}:{account_id}:{repository_name}" + ) self.repository_metadata["accountId"] = account_id diff --git a/moto/cognitoidentity/exceptions.py b/moto/cognitoidentity/exceptions.py index ed8135efc..99965bb38 100644 --- a/moto/cognitoidentity/exceptions.py +++ b/moto/cognitoidentity/exceptions.py @@ -7,7 +7,6 @@ class ResourceNotFoundError(JsonRESTError): class InvalidNameException(JsonRESTError): - message = "1 validation error detected: Value '{}' at 'identityPoolName' failed to satisfy constraint: Member must satisfy regular expression pattern: [\\w\\s+=,.@-]+" def __init__(self, name: str): diff --git a/moto/cognitoidp/models.py 
b/moto/cognitoidp/models.py index 051eb7ee4..e1dd68cc9 100644 --- a/moto/cognitoidp/models.py +++ b/moto/cognitoidp/models.py @@ -64,7 +64,6 @@ class AuthFlow(str, enum.Enum): class CognitoIdpUserPoolAttribute(BaseModel): - STANDARD_SCHEMA = { "sub": { "AttributeDataType": "String", @@ -385,7 +384,6 @@ DEFAULT_USER_POOL_CONFIG: Dict[str, Any] = { class CognitoIdpUserPool(BaseModel): - MAX_ID_LENGTH = 55 def __init__( @@ -426,10 +424,10 @@ class CognitoIdpUserPool(BaseModel): standard_attribute_name, standard_attribute_schema, ) in CognitoIdpUserPoolAttribute.STANDARD_SCHEMA.items(): - self.schema_attributes[ - standard_attribute_name - ] = CognitoIdpUserPoolAttribute( - standard_attribute_name, False, standard_attribute_schema + self.schema_attributes[standard_attribute_name] = ( + CognitoIdpUserPoolAttribute( + standard_attribute_name, False, standard_attribute_schema + ) ) self.clients: Dict[str, CognitoIdpUserPoolClient] = OrderedDict() diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py index 8d0d2c5ea..faa12a03a 100644 --- a/moto/cognitoidp/responses.py +++ b/moto/cognitoidp/responses.py @@ -482,9 +482,9 @@ class CognitoIdpResponse(BaseResponse): confirmation_code, response = cognitoidp_backends[account][ region ].forgot_password(client_id, username) - self.response_headers[ - "x-moto-forgot-password-confirmation-code" - ] = confirmation_code # type: ignore[assignment] + self.response_headers["x-moto-forgot-password-confirmation-code"] = ( + confirmation_code # type: ignore[assignment] + ) return json.dumps(response) # This endpoint receives no authorization header, so if moto-server is listening diff --git a/moto/comprehend/exceptions.py b/moto/comprehend/exceptions.py index d4e808d4c..edad17f36 100644 --- a/moto/comprehend/exceptions.py +++ b/moto/comprehend/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the comprehend service.""" + from typing import List from moto.core.exceptions import JsonRESTError diff --git a/moto/comprehend/responses.py b/moto/comprehend/responses.py index 339fdd199..b323c1088 100644 --- a/moto/comprehend/responses.py +++ b/moto/comprehend/responses.py @@ -1,4 +1,5 @@ """Handles incoming comprehend requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/comprehend/urls.py b/moto/comprehend/urls.py index 6627fc9ef..d300b238f 100644 --- a/moto/comprehend/urls.py +++ b/moto/comprehend/urls.py @@ -1,4 +1,5 @@ """comprehend base URL and path.""" + from .responses import ComprehendResponse url_bases = [ diff --git a/moto/config/models.py b/moto/config/models.py index 2aa96b2fd..aaea0dc4e 100644 --- a/moto/config/models.py +++ b/moto/config/models.py @@ -1,4 +1,5 @@ """Implementation of the AWS Config Service APIs.""" + import json import re import time @@ -487,7 +488,6 @@ class OrganizationConformancePack(ConfigEmptyDictable): class Scope(ConfigEmptyDictable): - """Defines resources that can trigger an evaluation for the rule. Per boto3 documentation, Scope can be one of: @@ -536,7 +536,6 @@ class Scope(ConfigEmptyDictable): class SourceDetail(ConfigEmptyDictable): - """Source and type of event triggering AWS Config resource evaluation. Applies only to customer rules. 
@@ -633,7 +632,6 @@ class SourceDetail(ConfigEmptyDictable): class Source(ConfigEmptyDictable): - """Defines rule owner, id and notification for triggering evaluation.""" OWNERS = {"AWS", "CUSTOM_LAMBDA"} @@ -713,7 +711,6 @@ class Source(ConfigEmptyDictable): class ConfigRule(ConfigEmptyDictable): - """AWS Config Rule to evaluate compliance of resources to configuration. Can be a managed or custom config rule. Contains the instantiations of @@ -921,7 +918,9 @@ class ConfigBackend(BaseBackend): self.retention_configuration: Optional[RetentionConfiguration] = None @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """List of dicts representing default VPC endpoints for this service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "config" diff --git a/moto/core/base_backend.py b/moto/core/base_backend.py index 1d686c3c2..1ed823bfd 100644 --- a/moto/core/base_backend.py +++ b/moto/core/base_backend.py @@ -140,7 +140,8 @@ class BaseBackend: @staticmethod def default_vpc_endpoint_service( - service_region: str, zones: List[str] # pylint: disable=unused-argument + service_region: str, + zones: List[str], # pylint: disable=unused-argument ) -> List[Dict[str, str]]: """Invoke the factory method for any VPC endpoint(s) services.""" return [] @@ -188,9 +189,9 @@ class BaseBackend: # Don't know how private DNS names are different, so for now just # one will be added. if private_dns_names: - endpoint_service[ - "PrivateDnsName" - ] = f"{service}.{service_region}.amazonaws.com" + endpoint_service["PrivateDnsName"] = ( + f"{service}.{service_region}.amazonaws.com" + ) endpoint_service["PrivateDnsNameVerificationState"] = "verified" endpoint_service["PrivateDnsNames"] = [ {"PrivateDnsName": f"{service}.{service_region}.amazonaws.com"} diff --git a/moto/core/botocore_stubber.py b/moto/core/botocore_stubber.py index d1637ad82..e3905dc5e 100644 --- a/moto/core/botocore_stubber.py +++ b/moto/core/botocore_stubber.py @@ -53,7 +53,6 @@ class BotocoreStubber: for service, pattern in backend_index.backend_url_patterns: if pattern.match(clean_url): - if passthrough_service(service): return None diff --git a/moto/core/common_models.py b/moto/core/common_models.py index fdf621f3e..d4e0af765 100644 --- a/moto/core/common_models.py +++ b/moto/core/common_models.py @@ -6,7 +6,9 @@ from .base_backend import SERVICE_BACKEND, BackendDict, InstanceTrackerMeta class BaseModel(metaclass=InstanceTrackerMeta): def __new__( - cls, *args: Any, **kwargs: Any # pylint: disable=unused-argument + cls, + *args: Any, + **kwargs: Any, # pylint: disable=unused-argument ) -> "BaseModel": instance = super(BaseModel, cls).__new__(cls) cls.instances.append(instance) # type: ignore[attr-defined] @@ -46,7 +48,7 @@ class CloudFormationModel(BaseModel): cloudformation_json: Dict[str, Any], account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> Any: # This must be implemented as a classmethod with parameters: # cls, resource_name, cloudformation_json, account_id, region_name diff --git a/moto/core/decorator.py b/moto/core/decorator.py index b74c37c9f..a30fc1912 100644 --- a/moto/core/decorator.py +++ b/moto/core/decorator.py @@ -13,13 +13,13 @@ T = TypeVar("T") @overload -def mock_aws(func: "Callable[P, T]") -> "Callable[P, T]": - ... 
+def mock_aws(func: "Callable[P, T]") -> "Callable[P, T]": ... @overload -def mock_aws(func: None = None, config: Optional[DefaultConfig] = None) -> "MockAWS": - ... +def mock_aws( + func: None = None, config: Optional[DefaultConfig] = None +) -> "MockAWS": ... def mock_aws( diff --git a/moto/core/exceptions.py b/moto/core/exceptions.py index 7be3b5979..157a63681 100644 --- a/moto/core/exceptions.py +++ b/moto/core/exceptions.py @@ -74,7 +74,9 @@ class RESTError(HTTPException): self.content_type = "application/xml" def get_headers( - self, *args: Any, **kwargs: Any # pylint: disable=unused-argument + self, + *args: Any, + **kwargs: Any, # pylint: disable=unused-argument ) -> List[Tuple[str, str]]: return [ ("X-Amzn-ErrorType", self.relative_error_type or "UnknownError"), @@ -86,7 +88,9 @@ class RESTError(HTTPException): return self.error_type def get_body( - self, *args: Any, **kwargs: Any # pylint: disable=unused-argument + self, + *args: Any, + **kwargs: Any, # pylint: disable=unused-argument ) -> str: return self.description diff --git a/moto/core/models.py b/moto/core/models.py index 51d12821a..123e753f5 100644 --- a/moto/core/models.py +++ b/moto/core/models.py @@ -369,9 +369,9 @@ class ServerModeMockAWS(MockAWS): if region: if "config" in kwargs: user_agent = kwargs["config"].__dict__.get("user_agent_extra") or "" - kwargs["config"].__dict__[ - "user_agent_extra" - ] = f"{user_agent} region/{region}" + kwargs["config"].__dict__["user_agent_extra"] = ( + f"{user_agent} region/{region}" + ) else: config = Config(user_agent_extra="region/" + region) kwargs["config"] = config @@ -406,7 +406,6 @@ class ServerModeMockAWS(MockAWS): class ProxyModeMockAWS(MockAWS): - _RESET_IN_PROGRESS = False def __init__(self, *args: Any, **kwargs: Any): diff --git a/moto/core/responses.py b/moto/core/responses.py index d091b2157..f29801109 100644 --- a/moto/core/responses.py +++ b/moto/core/responses.py @@ -645,7 +645,9 @@ class BaseResponse(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return if_none def _get_int_param( - self, param_name: str, if_none: TYPE_IF_NONE = None # type: ignore[assignment] + self, + param_name: str, + if_none: TYPE_IF_NONE = None, # type: ignore[assignment] ) -> Union[int, TYPE_IF_NONE]: val = self._get_param(param_name) if val is not None: @@ -653,7 +655,9 @@ class BaseResponse(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): return if_none def _get_bool_param( - self, param_name: str, if_none: TYPE_IF_NONE = None # type: ignore[assignment] + self, + param_name: str, + if_none: TYPE_IF_NONE = None, # type: ignore[assignment] ) -> Union[bool, TYPE_IF_NONE]: val = self._get_param(param_name) if val is not None: diff --git a/moto/core/utils.py b/moto/core/utils.py index 5a8d32bb3..2a73db789 100644 --- a/moto/core/utils.py +++ b/moto/core/utils.py @@ -281,7 +281,7 @@ def tags_from_query_string( def tags_from_cloudformation_tags_list( - tags_list: List[Dict[str, str]] + tags_list: List[Dict[str, str]], ) -> Dict[str, str]: """Return tags in dict form from cloudformation resource tags form (list of dicts)""" tags = {} diff --git a/moto/databrew/models.py b/moto/databrew/models.py index b43854dda..5dc01f198 100644 --- a/moto/databrew/models.py +++ b/moto/databrew/models.py @@ -300,7 +300,6 @@ class DataBrewBackend(BaseBackend): dataset_path_options: Dict[str, Any], tags: Dict[str, str], ) -> "FakeDataset": - if dataset_name not in self.datasets: raise ResourceNotFoundException("One or more resources can't be found.") @@ -621,7 +620,6 @@ class BaseModelABCMeta(ABCMeta, 
type(BaseModel)): # type: ignore[misc] class FakeJob(BaseModel, metaclass=BaseModelABCMeta): # type: ignore[misc] - ENCRYPTION_MODES = ("SSE-S3", "SSE-KMS") LOG_SUBSCRIPTION_VALUES = ("ENABLE", "DISABLE") diff --git a/moto/databrew/responses.py b/moto/databrew/responses.py index fc0f209f6..285ebfd29 100644 --- a/moto/databrew/responses.py +++ b/moto/databrew/responses.py @@ -29,7 +29,10 @@ class DataBrewResponse(BaseResponse): tags = self.parameters.get("Tags") return json.dumps( self.databrew_backend.create_recipe( - recipe_name, recipe_description, recipe_steps, tags # type: ignore[arg-type] + recipe_name, + recipe_description, + recipe_steps, + tags, # type: ignore[arg-type] ).as_dict() ) @@ -98,7 +101,9 @@ class DataBrewResponse(BaseResponse): recipe_steps = self.parameters.get("Steps") self.databrew_backend.update_recipe( - recipe_name, recipe_description, recipe_steps # type: ignore[arg-type] + recipe_name, + recipe_description, + recipe_steps, # type: ignore[arg-type] ) return json.dumps({"Name": recipe_name}) @@ -141,7 +146,10 @@ class DataBrewResponse(BaseResponse): tags = self.parameters.get("Tags") ruleset = self.databrew_backend.update_ruleset( - ruleset_name, ruleset_description, ruleset_rules, tags # type: ignore[arg-type] + ruleset_name, + ruleset_description, + ruleset_rules, + tags, # type: ignore[arg-type] ) return json.dumps(ruleset.as_dict()) diff --git a/moto/datapipeline/models.py b/moto/datapipeline/models.py index 3ea28e3b2..5672b3bd2 100644 --- a/moto/datapipeline/models.py +++ b/moto/datapipeline/models.py @@ -91,7 +91,7 @@ class Pipeline(CloudFormationModel): cloudformation_json: Dict[str, Any], account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "Pipeline": datapipeline_backend = datapipeline_backends[account_id][region_name] properties = cloudformation_json["Properties"] diff --git a/moto/datasync/models.py b/moto/datasync/models.py index b44f1033e..a5d0437a8 100644 --- a/moto/datasync/models.py +++ b/moto/datasync/models.py @@ -46,7 +46,6 @@ class Task(BaseModel): class TaskExecution(BaseModel): - # For simplicity, task_execution can never fail # Some documentation refers to this list: # 'Status': 'QUEUED'|'LAUNCHING'|'PREPARING'|'TRANSFERRING'|'VERIFYING'|'SUCCESS'|'ERROR' @@ -105,7 +104,9 @@ class DataSyncBackend(BaseBackend): self.task_executions: Dict[str, TaskExecution] = OrderedDict() @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "datasync" diff --git a/moto/dax/models.py b/moto/dax/models.py index d12e067d0..d188ed8ac 100644 --- a/moto/dax/models.py +++ b/moto/dax/models.py @@ -1,4 +1,5 @@ """DAXBackend class with methods for supported APIs.""" + from typing import Any, Dict, Iterable, List from moto.core.base_backend import BackendDict, BaseBackend @@ -61,9 +62,9 @@ class DaxEndpoint: def to_json(self, full: bool = False) -> Dict[str, Any]: dct: Dict[str, Any] = {"Port": self.port} if full: - dct[ - "Address" - ] = f"{self.name}.{self.cluster_hex}.dax-clusters.{self.region}.amazonaws.com" + dct["Address"] = ( + f"{self.name}.{self.cluster_hex}.dax-clusters.{self.region}.amazonaws.com" + ) dct["URL"] = f"dax://{dct['Address']}" return dct diff --git a/moto/dax/urls.py b/moto/dax/urls.py index 
ed4fe11bc..66079deb3 100644 --- a/moto/dax/urls.py +++ b/moto/dax/urls.py @@ -1,4 +1,5 @@ """dax base URL and path.""" + from .responses import DAXResponse url_bases = [ diff --git a/moto/dms/models.py b/moto/dms/models.py index e246de65d..82d8965b0 100644 --- a/moto/dms/models.py +++ b/moto/dms/models.py @@ -19,7 +19,9 @@ class DatabaseMigrationServiceBackend(BaseBackend): self.replication_tasks: Dict[str, "FakeReplicationTask"] = {} @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "dms" diff --git a/moto/ds/exceptions.py b/moto/ds/exceptions.py index f34016463..a505a9067 100644 --- a/moto/ds/exceptions.py +++ b/moto/ds/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the Directory Service service.""" + from typing import List, Tuple from moto.core.exceptions import JsonRESTError diff --git a/moto/ds/models.py b/moto/ds/models.py index 53a8ccf95..0dd46d45c 100644 --- a/moto/ds/models.py +++ b/moto/ds/models.py @@ -1,4 +1,5 @@ """DirectoryServiceBackend class with methods for supported APIs.""" + from datetime import datetime, timezone from typing import Any, Dict, List, Optional, Tuple @@ -87,7 +88,8 @@ class Directory(BaseModel): # pylint: disable=too-many-instance-attributes self.connect_settings["VpcId"] # type: ignore[index] ) self.eni_ids, self.subnet_ips = self.create_eni( - self.security_group_id, self.connect_settings["SubnetIds"] # type: ignore[index] + self.security_group_id, + self.connect_settings["SubnetIds"], # type: ignore[index] ) self.connect_settings["SecurityGroupId"] = self.security_group_id # type: ignore[index] self.connect_settings["ConnectIps"] = self.subnet_ips # type: ignore[index] @@ -98,7 +100,8 @@ class Directory(BaseModel): # pylint: disable=too-many-instance-attributes self.vpc_settings["VpcId"] # type: ignore[index] ) self.eni_ids, self.subnet_ips = self.create_eni( - self.security_group_id, self.vpc_settings["SubnetIds"] # type: ignore[index] + self.security_group_id, + self.vpc_settings["SubnetIds"], # type: ignore[index] ) self.vpc_settings["SecurityGroupId"] = self.security_group_id # type: ignore[index] self.dns_ip_addrs = self.subnet_ips diff --git a/moto/ds/responses.py b/moto/ds/responses.py index 4fc711467..a51cb72d4 100644 --- a/moto/ds/responses.py +++ b/moto/ds/responses.py @@ -1,4 +1,5 @@ """Handles Directory Service requests, invokes methods, returns responses.""" + import json from moto.core.exceptions import InvalidToken diff --git a/moto/ds/urls.py b/moto/ds/urls.py index 4ee1c8e42..a31bd1998 100644 --- a/moto/ds/urls.py +++ b/moto/ds/urls.py @@ -1,4 +1,5 @@ """ds base URL and path.""" + from .responses import DirectoryServiceResponse url_bases = [ diff --git a/moto/ds/validations.py b/moto/ds/validations.py index e44fc3f8e..45cdfd322 100644 --- a/moto/ds/validations.py +++ b/moto/ds/validations.py @@ -2,6 +2,7 @@ Note that ValidationExceptions are accumulative. """ + import re from typing import Any @@ -35,7 +36,7 @@ def validate_args(validators: Any) -> None: err_msgs = [] # This eventually could be a switch (python 3.10), elminating the need # for the above map and individual functions. 
- for (fieldname, value) in validators: + for fieldname, value in validators: msg = validation_map[fieldname](value) if msg: err_msgs.append((fieldname, value, msg)) diff --git a/moto/dynamodb/models/__init__.py b/moto/dynamodb/models/__init__.py index 60a075471..4faa592ec 100644 --- a/moto/dynamodb/models/__init__.py +++ b/moto/dynamodb/models/__init__.py @@ -477,7 +477,9 @@ class DynamoDBBackend(BaseBackend): validated_ast.normalize() try: UpdateExpressionExecutor( - validated_ast, item, expression_attribute_names # type: ignore[arg-type] + validated_ast, + item, + expression_attribute_names, # type: ignore[arg-type] ).execute() except ItemSizeTooLarge: raise ItemSizeToUpdateTooLarge() diff --git a/moto/dynamodb/models/table.py b/moto/dynamodb/models/table.py index 145c892fa..d0056780f 100644 --- a/moto/dynamodb/models/table.py +++ b/moto/dynamodb/models/table.py @@ -76,7 +76,9 @@ class LocalSecondaryIndex(SecondaryIndex): } @staticmethod - def create(dct: Dict[str, Any], table_key_attrs: List[str]) -> "LocalSecondaryIndex": # type: ignore[misc] + def create( + dct: Dict[str, Any], table_key_attrs: List[str] + ) -> "LocalSecondaryIndex": # type: ignore[misc] return LocalSecondaryIndex( index_name=dct["IndexName"], schema=dct["KeySchema"], @@ -112,7 +114,9 @@ class GlobalSecondaryIndex(SecondaryIndex): } @staticmethod - def create(dct: Dict[str, Any], table_key_attrs: List[str]) -> "GlobalSecondaryIndex": # type: ignore[misc] + def create( + dct: Dict[str, Any], table_key_attrs: List[str] + ) -> "GlobalSecondaryIndex": # type: ignore[misc] return GlobalSecondaryIndex( index_name=dct["IndexName"], schema=dct["KeySchema"], @@ -443,9 +447,9 @@ class Table(CloudFormationModel): } if self.latest_stream_label: results[base_key]["LatestStreamLabel"] = self.latest_stream_label - results[base_key][ - "LatestStreamArn" - ] = f"{self.table_arn}/stream/{self.latest_stream_label}" + results[base_key]["LatestStreamArn"] = ( + f"{self.table_arn}/stream/{self.latest_stream_label}" + ) if self.stream_specification and self.stream_specification["StreamEnabled"]: results[base_key]["StreamSpecification"] = self.stream_specification if self.sse_specification and self.sse_specification.get("Enabled") is True: @@ -657,7 +661,6 @@ class Table(CloudFormationModel): filter_expression: Any = None, **filter_kwargs: Any, ) -> Tuple[List[Item], int, Optional[Dict[str, Any]]]: - # FIND POSSIBLE RESULTS if index_name: all_indexes = self.all_indexes() diff --git a/moto/dynamodb/parsing/validators.py b/moto/dynamodb/parsing/validators.py index fd54c7c80..58cb67d09 100644 --- a/moto/dynamodb/parsing/validators.py +++ b/moto/dynamodb/parsing/validators.py @@ -257,7 +257,9 @@ class UpdateExpressionFunctionEvaluator(DepthFirstTraverser): # type: ignore[mi raise NotImplementedError(f"Unsupported function for moto {function_name}") @classmethod - def get_list_from_ddb_typed_value(cls, node: DDBTypedValue, function_name: str) -> DynamoType: # type: ignore[misc] + def get_list_from_ddb_typed_value( + cls, node: DDBTypedValue, function_name: str + ) -> DynamoType: # type: ignore[misc] assert isinstance(node, DDBTypedValue) dynamo_value = node.get_value() assert isinstance(dynamo_value, DynamoType) @@ -322,7 +324,9 @@ class ExecuteOperations(DepthFirstTraverser): # type: ignore[misc] return dynamo_value @classmethod - def get_sum(cls, left_operand: DynamoType, right_operand: DynamoType) -> DDBTypedValue: # type: ignore[misc] + def get_sum( + cls, left_operand: DynamoType, right_operand: DynamoType + ) -> DDBTypedValue: # type: 
ignore[misc] """ Args: left_operand(DynamoType): @@ -337,7 +341,9 @@ class ExecuteOperations(DepthFirstTraverser): # type: ignore[misc] raise IncorrectOperandType("+", left_operand.type) @classmethod - def get_subtraction(cls, left_operand: DynamoType, right_operand: DynamoType) -> DDBTypedValue: # type: ignore[misc] + def get_subtraction( + cls, left_operand: DynamoType, right_operand: DynamoType + ) -> DDBTypedValue: # type: ignore[misc] """ Args: left_operand(DynamoType): diff --git a/moto/dynamodb/responses.py b/moto/dynamodb/responses.py index b6b02df38..3b50dfc7a 100644 --- a/moto/dynamodb/responses.py +++ b/moto/dynamodb/responses.py @@ -29,7 +29,7 @@ def include_consumed_capacity( Callable[["DynamoHandler"], Union[str, TYPE_RESPONSE]], ]: def _inner( - f: Callable[..., Union[str, TYPE_RESPONSE]] + f: Callable[..., Union[str, TYPE_RESPONSE]], ) -> Callable[["DynamoHandler"], Union[str, TYPE_RESPONSE]]: @wraps(f) def _wrapper( diff --git a/moto/dynamodb_v20111205/models.py b/moto/dynamodb_v20111205/models.py index 97eb95379..a3fb1b2b6 100644 --- a/moto/dynamodb_v20111205/models.py +++ b/moto/dynamodb_v20111205/models.py @@ -165,7 +165,9 @@ class Table(BaseModel): def put_item(self, item_attrs: Dict[str, Any]) -> Item: hash_value = DynamoType(item_attrs.get(self.hash_key_attr)) # type: ignore[arg-type] if self.has_range_key: - range_value: Optional[DynamoType] = DynamoType(item_attrs.get(self.range_key_attr)) # type: ignore[arg-type] + range_value: Optional[DynamoType] = DynamoType( + item_attrs.get(self.range_key_attr) + ) # type: ignore[arg-type] else: range_value = None diff --git a/moto/dynamodbstreams/models.py b/moto/dynamodbstreams/models.py index b69ec43d4..4833a2274 100644 --- a/moto/dynamodbstreams/models.py +++ b/moto/dynamodbstreams/models.py @@ -60,9 +60,9 @@ class ShardIterator(BaseModel): self.sequence_number, ) - self.streams_backend.shard_iterators[ - new_shard_iterator.arn - ] = new_shard_iterator + self.streams_backend.shard_iterators[new_shard_iterator.arn] = ( + new_shard_iterator + ) return {"NextShardIterator": new_shard_iterator.arn, "Records": items} @@ -128,7 +128,10 @@ class DynamoDBStreamsBackend(BaseBackend): assert table.stream_shard.id == shard_id # type: ignore[union-attr] shard_iterator = ShardIterator( - self, table.stream_shard, shard_iterator_type, sequence_number # type: ignore[arg-type] + self, + table.stream_shard, + shard_iterator_type, + sequence_number, # type: ignore[arg-type] ) self.shard_iterators[shard_iterator.arn] = shard_iterator diff --git a/moto/ebs/models.py b/moto/ebs/models.py index a09438086..367b7d968 100644 --- a/moto/ebs/models.py +++ b/moto/ebs/models.py @@ -119,9 +119,9 @@ class EBSBackend(BaseBackend): """ snapshot1 = self.snapshots[first_snapshot_id] snapshot2 = self.snapshots[second_snapshot_id] - changed_blocks: Dict[ - str, Tuple[str, Optional[str]] - ] = dict() # {idx: (token1, token2), ..} + changed_blocks: Dict[str, Tuple[str, Optional[str]]] = ( + dict() + ) # {idx: (token1, token2), ..} for idx in snapshot1.blocks: block1 = snapshot1.blocks[idx] if idx in snapshot2.blocks: diff --git a/moto/ebs/responses.py b/moto/ebs/responses.py index 5d13cda4b..0482d9c4e 100644 --- a/moto/ebs/responses.py +++ b/moto/ebs/responses.py @@ -1,4 +1,5 @@ """Handles incoming ebs requests, invokes methods, returns responses.""" + import json from typing import Any @@ -24,14 +25,18 @@ class EBSResponse(BaseResponse): if request.method == "POST": return self.start_snapshot() - def snapshot_block(self, request: Any, full_url: str, headers: 
Any) -> TYPE_RESPONSE: # type: ignore[return] + def snapshot_block( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers, use_raw_body=True) if request.method == "PUT": return self.put_snapshot_block(full_url, headers) if request.method == "GET": return self.get_snapshot_block() - def snapshot_blocks(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def snapshot_blocks( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": return self.list_snapshot_blocks() diff --git a/moto/ebs/urls.py b/moto/ebs/urls.py index 05bf5782b..04d7501bb 100644 --- a/moto/ebs/urls.py +++ b/moto/ebs/urls.py @@ -1,4 +1,5 @@ """ebs base URL and path.""" + from .responses import EBSResponse url_bases = [r"https?://ebs\.(.+)\.amazonaws\.com"] diff --git a/moto/ec2/models/__init__.py b/moto/ec2/models/__init__.py index 4f7d7cbfb..d474f9050 100644 --- a/moto/ec2/models/__init__.py +++ b/moto/ec2/models/__init__.py @@ -164,7 +164,9 @@ class EC2Backend( ip[2] += 16 # type: ignore @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "ec2" diff --git a/moto/ec2/models/amis.py b/moto/ec2/models/amis.py index 862b97536..65385a01a 100644 --- a/moto/ec2/models/amis.py +++ b/moto/ec2/models/amis.py @@ -172,7 +172,8 @@ class AmiBackend: latest_amis = cast( List[Dict[str, Any]], load_resource( - __name__, f"../resources/{path}/{self.region_name}.json" # type: ignore[attr-defined] + __name__, + f"../resources/{path}/{self.region_name}.json", # type: ignore[attr-defined] ), ) for ami in latest_amis: diff --git a/moto/ec2/models/dhcp_options.py b/moto/ec2/models/dhcp_options.py index 4e45a08fa..b0a27dd90 100644 --- a/moto/ec2/models/dhcp_options.py +++ b/moto/ec2/models/dhcp_options.py @@ -83,7 +83,6 @@ class DHCPOptionsSetBackend: netbios_name_servers: Optional[List[str]] = None, netbios_node_type: Optional[str] = None, ) -> DHCPOptionsSet: - NETBIOS_NODE_TYPES = [1, 2, 4, 8] for field_value in domain_name_servers, ntp_servers, netbios_name_servers: diff --git a/moto/ec2/models/elastic_block_store.py b/moto/ec2/models/elastic_block_store.py index 2416cad4c..d820401f9 100644 --- a/moto/ec2/models/elastic_block_store.py +++ b/moto/ec2/models/elastic_block_store.py @@ -86,7 +86,7 @@ class VolumeAttachment(CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "VolumeAttachment": from ..models import ec2_backends @@ -160,7 +160,7 @@ class Volume(TaggedEC2Resource, CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "Volume": from ..models import ec2_backends diff --git a/moto/ec2/models/elastic_ip_addresses.py b/moto/ec2/models/elastic_ip_addresses.py index 5252e7eb1..e435e3c8e 100644 --- a/moto/ec2/models/elastic_ip_addresses.py +++ b/moto/ec2/models/elastic_ip_addresses.py @@ -55,7 +55,7 @@ class ElasticAddress(TaggedEC2Resource, CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) 
-> "ElasticAddress": from ..models import ec2_backends diff --git a/moto/ec2/models/elastic_network_interfaces.py b/moto/ec2/models/elastic_network_interfaces.py index d1e4ec9b3..c770c580c 100644 --- a/moto/ec2/models/elastic_network_interfaces.py +++ b/moto/ec2/models/elastic_network_interfaces.py @@ -309,7 +309,7 @@ class NetworkInterfaceBackend: enis = list(self.enis.values()) if filters: - for (_filter, _filter_value) in filters.items(): + for _filter, _filter_value in filters.items(): if _filter == "network-interface-id": _filter = "id" enis = [ diff --git a/moto/ec2/models/fleets.py b/moto/ec2/models/fleets.py index 19dd57da5..8178c3157 100644 --- a/moto/ec2/models/fleets.py +++ b/moto/ec2/models/fleets.py @@ -28,7 +28,6 @@ class Fleet(TaggedEC2Resource): valid_until: str, tag_specifications: List[Dict[str, Any]], ): - self.ec2_backend = ec2_backend self.id = fleet_id self.spot_options = spot_options @@ -269,7 +268,6 @@ class FleetsBackend: valid_until: str, tag_specifications: List[Dict[str, Any]], ) -> Fleet: - fleet_id = random_fleet_id() fleet = Fleet( self, @@ -309,7 +307,6 @@ class FleetsBackend: def delete_fleets( self, fleet_ids: List[str], terminate_instances: bool ) -> List[Fleet]: - fleets = [] for fleet_id in fleet_ids: fleet = self.fleets[fleet_id] diff --git a/moto/ec2/models/iam_instance_profile.py b/moto/ec2/models/iam_instance_profile.py index ad8b75156..f47cc022a 100644 --- a/moto/ec2/models/iam_instance_profile.py +++ b/moto/ec2/models/iam_instance_profile.py @@ -50,7 +50,9 @@ class IamInstanceProfileAssociationBackend: iam_association_id = random_iam_instance_profile_association_id() instance_profile = filter_iam_instance_profiles( - self.account_id, iam_instance_profile_arn, iam_instance_profile_name # type: ignore[attr-defined] + self.account_id, + iam_instance_profile_arn, + iam_instance_profile_name, # type: ignore[attr-defined] ) if instance_id in self.iam_instance_profile_associations.keys(): @@ -63,9 +65,9 @@ class IamInstanceProfileAssociationBackend: instance_profile, ) # Regarding to AWS there can be only one association with ec2. 
- self.iam_instance_profile_associations[ - instance_id - ] = iam_instance_profile_association + self.iam_instance_profile_associations[instance_id] = ( + iam_instance_profile_association + ) return iam_instance_profile_association def describe_iam_instance_profile_associations( @@ -125,7 +127,9 @@ class IamInstanceProfileAssociationBackend: iam_instance_profile_arn: Optional[str] = None, ) -> IamInstanceProfileAssociation: instance_profile = filter_iam_instance_profiles( - self.account_id, iam_instance_profile_arn, iam_instance_profile_name # type: ignore[attr-defined] + self.account_id, + iam_instance_profile_arn, + iam_instance_profile_name, # type: ignore[attr-defined] ) iam_instance_profile_association = None diff --git a/moto/ec2/models/instances.py b/moto/ec2/models/instances.py index ce6d7a660..26fb59249 100644 --- a/moto/ec2/models/instances.py +++ b/moto/ec2/models/instances.py @@ -383,7 +383,9 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel): return self._state.name == "running" def delete( - self, account_id: str, region: str # pylint: disable=unused-argument + self, + account_id: str, + region: str, # pylint: disable=unused-argument ) -> None: self.terminate() @@ -666,7 +668,8 @@ class InstanceBackend: raise InvalidInstanceTypeError(kwargs["instance_type"]) security_groups = [ - self.get_security_group_by_name_or_id(name) for name in security_group_names # type: ignore[attr-defined] + self.get_security_group_by_name_or_id(name) + for name in security_group_names # type: ignore[attr-defined] ] for sg_id in kwargs.pop("security_group_ids", []): diff --git a/moto/ec2/models/launch_templates.py b/moto/ec2/models/launch_templates.py index e76df70d6..a69aed18f 100644 --- a/moto/ec2/models/launch_templates.py +++ b/moto/ec2/models/launch_templates.py @@ -127,9 +127,8 @@ class LaunchTemplate(TaggedEC2Resource, CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "LaunchTemplate": - from ..models import ec2_backends backend = ec2_backends[account_id][region_name] @@ -160,7 +159,6 @@ class LaunchTemplate(TaggedEC2Resource, CloudFormationModel): account_id: str, region_name: str, ) -> "LaunchTemplate": - from ..models import ec2_backends backend = ec2_backends[account_id][region_name] @@ -184,7 +182,6 @@ class LaunchTemplate(TaggedEC2Resource, CloudFormationModel): account_id: str, region_name: str, ) -> None: - from ..models import ec2_backends backend = ec2_backends[account_id][region_name] diff --git a/moto/ec2/models/managed_prefixes.py b/moto/ec2/models/managed_prefixes.py index c5a61438c..8d50e3713 100644 --- a/moto/ec2/models/managed_prefixes.py +++ b/moto/ec2/models/managed_prefixes.py @@ -103,7 +103,9 @@ class ManagedPrefixListBackend: return self.managed_prefix_lists.get(prefix_list_id) def delete_managed_prefix_list(self, prefix_list_id: str) -> ManagedPrefixList: - managed_prefix_list: ManagedPrefixList = self.managed_prefix_lists.get(prefix_list_id) # type: ignore + managed_prefix_list: ManagedPrefixList = self.managed_prefix_lists.get( + prefix_list_id + ) # type: ignore managed_prefix_list.state = "delete-complete" return managed_prefix_list diff --git a/moto/ec2/models/nat_gateways.py b/moto/ec2/models/nat_gateways.py index 33eefac7c..78f466142 100644 --- a/moto/ec2/models/nat_gateways.py +++ b/moto/ec2/models/nat_gateways.py @@ -64,7 +64,7 @@ class NatGateway(CloudFormationModel, TaggedEC2Resource): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + 
**kwargs: Any, ) -> "NatGateway": from ..models import ec2_backends diff --git a/moto/ec2/models/network_acls.py b/moto/ec2/models/network_acls.py index 8c7fcb0a9..5e9808778 100644 --- a/moto/ec2/models/network_acls.py +++ b/moto/ec2/models/network_acls.py @@ -79,7 +79,6 @@ class NetworkAclBackend: port_range_from: Optional[int], port_range_to: Optional[int], ) -> "NetworkAclEntry": - network_acl = self.get_network_acl(network_acl_id) if any( entry.egress == egress and entry.rule_number == rule_number @@ -129,7 +128,6 @@ class NetworkAclBackend: port_range_from: int, port_range_to: int, ) -> "NetworkAclEntry": - self.delete_network_acl_entry(network_acl_id, rule_number, egress) network_acl_entry = self.create_network_acl_entry( network_acl_id, @@ -148,7 +146,6 @@ class NetworkAclBackend: def replace_network_acl_association( self, association_id: str, network_acl_id: str ) -> "NetworkAclAssociation": - # lookup existing association for subnet and delete it default_acl = next( value diff --git a/moto/ec2/models/route_tables.py b/moto/ec2/models/route_tables.py index de4eb383e..7b5226838 100644 --- a/moto/ec2/models/route_tables.py +++ b/moto/ec2/models/route_tables.py @@ -488,7 +488,9 @@ class RouteBackend: ) route.instance = self.get_instance(instance_id) if instance_id else None # type: ignore[attr-defined] - route.interface = self.get_network_interface(interface_id) if interface_id else None # type: ignore[attr-defined] + route.interface = ( + self.get_network_interface(interface_id) if interface_id else None + ) # type: ignore[attr-defined] route.vpc_pcx = ( self.get_vpc_peering_connection(vpc_peering_connection_id) # type: ignore[attr-defined] if vpc_peering_connection_id diff --git a/moto/ec2/models/security_groups.py b/moto/ec2/models/security_groups.py index 0a776d76f..cd7ef503e 100644 --- a/moto/ec2/models/security_groups.py +++ b/moto/ec2/models/security_groups.py @@ -226,7 +226,7 @@ class SecurityGroup(TaggedEC2Resource, CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "SecurityGroup": from ..models import ec2_backends @@ -310,7 +310,9 @@ class SecurityGroup(TaggedEC2Resource, CloudFormationModel): security_group.delete(account_id, region_name) def delete( - self, account_id: str, region_name: str # pylint: disable=unused-argument + self, + account_id: str, + region_name: str, # pylint: disable=unused-argument ) -> None: """Not exposed as part of the ELB API - used for CloudFormation.""" self.ec2_backend.delete_security_group(group_id=self.id) @@ -627,7 +629,9 @@ class SecurityGroupBackend: return results @staticmethod - def _match_sg_rules(rules_list: List[SecurityRule], filters: Any) -> List[SecurityRule]: # type: ignore[misc] + def _match_sg_rules( + rules_list: List[SecurityRule], filters: Any + ) -> List[SecurityRule]: # type: ignore[misc] results = [] for rule in rules_list: if rule.match_tags(filters): @@ -802,7 +806,9 @@ class SecurityGroupBackend: security_rule_ids: Optional[List[str]] = None, vpc_id: Optional[str] = None, ) -> None: - group: SecurityGroup = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id) # type: ignore[assignment] + group: SecurityGroup = self.get_security_group_by_name_or_id( + group_name_or_id, vpc_id + ) # type: ignore[assignment] if security_rule_ids: group.ingress_rules = [ @@ -971,7 +977,9 @@ class SecurityGroupBackend: security_rule_ids: Optional[List[str]] = None, vpc_id: Optional[str] = None, ) -> None: - group: SecurityGroup = 
self.get_security_group_by_name_or_id(group_name_or_id, vpc_id) # type: ignore[assignment] + group: SecurityGroup = self.get_security_group_by_name_or_id( + group_name_or_id, vpc_id + ) # type: ignore[assignment] if security_rule_ids: group.egress_rules = [ @@ -1266,7 +1274,7 @@ class SecurityGroupIngress(CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "SecurityGroupIngress": from ..models import ec2_backends diff --git a/moto/ec2/models/spot_requests.py b/moto/ec2/models/spot_requests.py index 8225c17d3..0895698aa 100644 --- a/moto/ec2/models/spot_requests.py +++ b/moto/ec2/models/spot_requests.py @@ -188,7 +188,6 @@ class SpotFleetRequest(TaggedEC2Resource, CloudFormationModel): instance_interruption_behaviour: Optional[str], tag_specifications: Optional[List[Dict[str, Any]]], ): - self.ec2_backend = ec2_backend self.spot_backend = spot_backend self.id = spot_fleet_request_id @@ -468,7 +467,6 @@ class SpotRequestBackend: instance_interruption_behaviour: Optional[str] = None, tag_specifications: Optional[List[Dict[str, Any]]] = None, ) -> SpotFleetRequest: - spot_fleet_request_id = random_spot_fleet_request_id() request = SpotFleetRequest( ec2_backend=self, diff --git a/moto/ec2/models/subnets.py b/moto/ec2/models/subnets.py index 8225d7c57..c3096fc5b 100644 --- a/moto/ec2/models/subnets.py +++ b/moto/ec2/models/subnets.py @@ -61,9 +61,9 @@ class Subnet(TaggedEC2Resource, CloudFormationModel): self.reserved_ips = [ next(self._subnet_ip_generator) for _ in range(0, 3) ] # Reserved by AWS - self._unused_ips: Set[ - str - ] = set() # if instance is destroyed hold IP here for reuse + self._unused_ips: Set[str] = ( + set() + ) # if instance is destroyed hold IP here for reuse self._subnet_ips: Dict[str, "Instance"] = {} self.state = "available" @@ -321,7 +321,6 @@ class SubnetBackend: availability_zone_id: Optional[str] = None, tags: Optional[Dict[str, Dict[str, str]]] = None, ) -> Subnet: - subnet_id = random_subnet_id() # Validate VPC exists and the supplied CIDR block is a subnet of the VPC's vpc = self.get_vpc(vpc_id) # type: ignore[attr-defined] diff --git a/moto/ec2/models/transit_gateway_attachments.py b/moto/ec2/models/transit_gateway_attachments.py index 8ed5bb6a2..df97bd56e 100644 --- a/moto/ec2/models/transit_gateway_attachments.py +++ b/moto/ec2/models/transit_gateway_attachments.py @@ -132,9 +132,9 @@ class TransitGatewayAttachmentBackend: transit_gateway_id=transit_gateway_id, tags=tags, ) - self.transit_gateway_attachments[ - transit_gateway_vpn_attachment.id - ] = transit_gateway_vpn_attachment + self.transit_gateway_attachments[transit_gateway_vpn_attachment.id] = ( + transit_gateway_vpn_attachment + ) return transit_gateway_vpn_attachment def create_transit_gateway_vpc_attachment( @@ -153,9 +153,9 @@ class TransitGatewayAttachmentBackend: subnet_ids=subnet_ids, options=options, ) - self.transit_gateway_attachments[ - transit_gateway_vpc_attachment.id - ] = transit_gateway_vpc_attachment + self.transit_gateway_attachments[transit_gateway_vpc_attachment.id] = ( + transit_gateway_vpc_attachment + ) return transit_gateway_vpc_attachment def describe_transit_gateway_attachments( @@ -231,11 +231,12 @@ class TransitGatewayAttachmentBackend: options: Optional[Dict[str, str]] = None, remove_subnet_ids: Optional[List[str]] = None, ) -> TransitGatewayAttachment: - tgw_attachment = self.transit_gateway_attachments[transit_gateway_attachment_id] if remove_subnet_ids: tgw_attachment.subnet_ids = [ # type: 
ignore[attr-defined] - id for id in tgw_attachment.subnet_ids if id not in remove_subnet_ids # type: ignore[attr-defined] + id + for id in tgw_attachment.subnet_ids + if id not in remove_subnet_ids # type: ignore[attr-defined] ] if options: @@ -294,9 +295,9 @@ class TransitGatewayAttachmentBackend: region_name=self.region_name, # type: ignore[attr-defined] ) - self.transit_gateway_attachments[ - transit_gateway_peering_attachment.id - ] = transit_gateway_peering_attachment + self.transit_gateway_attachments[transit_gateway_peering_attachment.id] = ( + transit_gateway_peering_attachment + ) # If the peer is not same as the current account or region, create attachment in peer backend if self.account_id != peer_account_id or self.region_name != peer_region: # type: ignore[attr-defined] @@ -356,12 +357,18 @@ class TransitGatewayAttachmentBackend: accepter_region_name = transit_gateway_attachment.accepter_tgw_info["region"] # type: ignore[attr-defined] # For cross-account peering, must be accepted by the accepter - if requester_account_id != accepter_account_id and self.account_id != accepter_account_id: # type: ignore[attr-defined] + if ( + requester_account_id != accepter_account_id + and self.account_id != accepter_account_id + ): # type: ignore[attr-defined] raise InvalidParameterValueErrorPeeringAttachment( "accept", transit_gateway_attachment_id ) - if requester_region_name != accepter_region_name and self.region_name != accepter_region_name: # type: ignore[attr-defined] + if ( + requester_region_name != accepter_region_name + and self.region_name != accepter_region_name + ): # type: ignore[attr-defined] raise InvalidParameterValueErrorPeeringAttachment( "accept", transit_gateway_attachment_id ) @@ -384,12 +391,18 @@ class TransitGatewayAttachmentBackend: accepter_account_id = transit_gateway_attachment.accepter_tgw_info["ownerId"] # type: ignore[attr-defined] accepter_region_name = transit_gateway_attachment.requester_tgw_info["region"] # type: ignore[attr-defined] - if requester_account_id != accepter_account_id and self.account_id != accepter_account_id: # type: ignore[attr-defined] + if ( + requester_account_id != accepter_account_id + and self.account_id != accepter_account_id + ): # type: ignore[attr-defined] raise InvalidParameterValueErrorPeeringAttachment( "reject", transit_gateway_attachment_id ) - if requester_region_name != accepter_region_name and self.region_name != accepter_region_name: # type: ignore[attr-defined] + if ( + requester_region_name != accepter_region_name + and self.region_name != accepter_region_name + ): # type: ignore[attr-defined] raise InvalidParameterValueErrorPeeringAttachment( "reject", transit_gateway_attachment_id ) diff --git a/moto/ec2/models/transit_gateway_route_tables.py b/moto/ec2/models/transit_gateway_route_tables.py index ad600b0e1..0c365c0c7 100644 --- a/moto/ec2/models/transit_gateway_route_tables.py +++ b/moto/ec2/models/transit_gateway_route_tables.py @@ -80,9 +80,9 @@ class TransitGatewayRouteTableBackend: default_association_route_table=default_association_route_table, default_propagation_route_table=default_propagation_route_table, ) - self.transit_gateways_route_tables[ - transit_gateways_route_table.id - ] = transit_gateways_route_table + self.transit_gateways_route_tables[transit_gateways_route_table.id] = ( + transit_gateways_route_table + ) return transit_gateways_route_table def get_all_transit_gateway_route_tables( @@ -310,9 +310,9 @@ class TransitGatewayRouteTableBackend: self.set_attachment_association( # type: 
ignore[attr-defined] transit_gateway_attachment_id, transit_gateway_route_table_id ) - self.transit_gateway_associations[ - transit_gateway_attachment_id - ] = transit_gateway_association + self.transit_gateway_associations[transit_gateway_attachment_id] = ( + transit_gateway_association + ) return transit_gateway_association @@ -331,9 +331,9 @@ class TransitGatewayRouteTableBackend: self.set_attachment_propagation( # type: ignore[attr-defined] transit_gateway_attachment_id, transit_gateway_route_table_id ) - self.transit_gateway_propagations[ - transit_gateway_attachment_id - ] = transit_gateway_propagation + self.transit_gateway_propagations[transit_gateway_attachment_id] = ( + transit_gateway_propagation + ) return transit_gateway_propagation diff --git a/moto/ec2/models/vpcs.py b/moto/ec2/models/vpcs.py index 5ebb02457..d20b5490b 100644 --- a/moto/ec2/models/vpcs.py +++ b/moto/ec2/models/vpcs.py @@ -87,7 +87,6 @@ ENDPOINT_SERVICE_COLLECTION_LOCK = threading.Lock() class VPCEndPoint(TaggedEC2Resource, CloudFormationModel): - DEFAULT_POLICY = { "Version": "2008-10-17", "Statement": [ @@ -223,7 +222,6 @@ class VPC(TaggedEC2Resource, CloudFormationModel): amazon_provided_ipv6_cidr_block: bool = False, ipv6_cidr_block_network_border_group: Optional[str] = None, ): - self.ec2_backend = ec2_backend self.id = vpc_id self.cidr_block = cidr_block @@ -402,9 +400,9 @@ class VPC(TaggedEC2Resource, CloudFormationModel): ) if amazon_provided_ipv6_cidr_block: association_set["ipv6_pool"] = "Amazon" - association_set[ - "ipv6_cidr_block_network_border_group" - ] = ipv6_cidr_block_network_border_group + association_set["ipv6_cidr_block_network_border_group"] = ( + ipv6_cidr_block_network_border_group + ) self.cidr_block_association_set[association_id] = association_set return association_set @@ -670,7 +668,6 @@ class VPCBackend: tags: Optional[Dict[str, str]] = None, private_dns_enabled: Optional[str] = None, ) -> VPCEndPoint: - vpc_endpoint_id = random_vpc_ep_id() # validates if vpc is present or not. @@ -678,7 +675,6 @@ class VPCBackend: destination_prefix_list_id = None if endpoint_type and endpoint_type.lower() == "interface": - network_interface_ids = [] for subnet_id in subnet_ids or []: self.get_subnet(subnet_id) # type: ignore[attr-defined] @@ -776,7 +772,6 @@ class VPCBackend: ) -> List[Dict[str, str]]: """Return list of default services using list of backends.""" with ENDPOINT_SERVICE_COLLECTION_LOCK: - if DEFAULT_VPC_ENDPOINT_SERVICES: return DEFAULT_VPC_ENDPOINT_SERVICES @@ -808,7 +803,9 @@ class VPCBackend: return DEFAULT_VPC_ENDPOINT_SERVICES @staticmethod - def _matches_service_by_tags(service: Dict[str, Any], filter_item: Dict[str, Any]) -> bool: # type: ignore[misc] + def _matches_service_by_tags( + service: Dict[str, Any], filter_item: Dict[str, Any] + ) -> bool: # type: ignore[misc] """Return True if service tags are not filtered by their tags. 
Note that the API specifies a key of "Values" for a filter, but @@ -840,7 +837,11 @@ class VPCBackend: return matched @staticmethod - def _filter_endpoint_services(service_names_filters: List[str], filters: List[Dict[str, Any]], services: List[Dict[str, Any]]) -> List[Dict[str, Any]]: # type: ignore[misc] + def _filter_endpoint_services( + service_names_filters: List[str], + filters: List[Dict[str, Any]], + services: List[Dict[str, Any]], + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Return filtered list of VPC endpoint services.""" if not service_names_filters and not filters: return services @@ -913,7 +914,8 @@ class VPCBackend: The DryRun parameter is ignored. """ default_services = self._collect_default_endpoint_services( - self.account_id, region # type: ignore[attr-defined] + self.account_id, + region, # type: ignore[attr-defined] ) custom_services = [x.to_dict() for x in self.configurations.values()] # type: ignore all_services = default_services + custom_services diff --git a/moto/ec2/models/vpn_connections.py b/moto/ec2/models/vpn_connections.py index 03641be94..e4604d410 100644 --- a/moto/ec2/models/vpn_connections.py +++ b/moto/ec2/models/vpn_connections.py @@ -64,7 +64,6 @@ class VPNConnectionBackend: return vpn_connection def delete_vpn_connection(self, vpn_connection_id: str) -> VPNConnection: - if vpn_connection_id in self.vpn_connections: self.vpn_connections[vpn_connection_id].state = "deleted" else: diff --git a/moto/ec2/models/vpn_gateway.py b/moto/ec2/models/vpn_gateway.py index edb65287f..1b29a1734 100644 --- a/moto/ec2/models/vpn_gateway.py +++ b/moto/ec2/models/vpn_gateway.py @@ -32,7 +32,7 @@ class VPCGatewayAttachment(CloudFormationModel): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "VPCGatewayAttachment": from ..models import ec2_backends @@ -93,7 +93,7 @@ class VpnGateway(CloudFormationModel, TaggedEC2Resource): cloudformation_json: Any, account_id: str, region_name: str, - **kwargs: Any + **kwargs: Any, ) -> "VpnGateway": from ..models import ec2_backends diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py index 78b1ccfc9..337ef7351 100644 --- a/moto/ec2/responses/instances.py +++ b/moto/ec2/responses/instances.py @@ -398,7 +398,6 @@ class InstanceResponse(EC2BaseResponse): @staticmethod def _validate_block_device_mapping(device_mapping: Dict[str, Any]) -> None: # type: ignore[misc] - from botocore import __version__ as botocore_version if "no_device" in device_mapping: diff --git a/moto/ec2/responses/transit_gateways.py b/moto/ec2/responses/transit_gateways.py index 013ab2528..ae1802d4c 100644 --- a/moto/ec2/responses/transit_gateways.py +++ b/moto/ec2/responses/transit_gateways.py @@ -22,12 +22,12 @@ class TransitGateways(EC2BaseResponse): default_propagation_route_table=True, ) ) - transit_gateway.options[ - "AssociationDefaultRouteTableId" - ] = transit_gateway_route_table.id - transit_gateway.options[ - "PropagationDefaultRouteTableId" - ] = transit_gateway_route_table.id + transit_gateway.options["AssociationDefaultRouteTableId"] = ( + transit_gateway_route_table.id + ) + transit_gateway.options["PropagationDefaultRouteTableId"] = ( + transit_gateway_route_table.id + ) template = self.response_template(CREATE_TRANSIT_GATEWAY_RESPONSE) return template.render(transit_gateway=transit_gateway) diff --git a/moto/ec2/utils.py b/moto/ec2/utils.py index 3446755f9..2e0f47da2 100644 --- a/moto/ec2/utils.py +++ b/moto/ec2/utils.py @@ -544,7 +544,7 @@ def generic_filter( filters: 
Dict[str, Any], objects: List[FILTER_TYPE] ) -> List[FILTER_TYPE]: if filters: - for (_filter, _filter_value) in filters.items(): + for _filter, _filter_value in filters.items(): objects = [ obj for obj in objects @@ -710,7 +710,7 @@ def _convert_rfc4716(data: bytes) -> bytes: def public_key_parse( - key_material: Union[str, bytes] + key_material: Union[str, bytes], ) -> Union[RSAPublicKey, Ed25519PublicKey]: try: if isinstance(key_material, str): diff --git a/moto/ecr/models.py b/moto/ecr/models.py index b049fbce6..09e4b810b 100644 --- a/moto/ecr/models.py +++ b/moto/ecr/models.py @@ -109,9 +109,9 @@ class Repository(BaseObject, CloudFormationModel): if not encryption_config: return {"encryptionType": "AES256"} if encryption_config == {"encryptionType": "KMS"}: - encryption_config[ - "kmsKey" - ] = f"arn:aws:kms:{self.region_name}:{self.account_id}:key/{random.uuid4()}" + encryption_config["kmsKey"] = ( + f"arn:aws:kms:{self.region_name}:{self.account_id}:key/{random.uuid4()}" + ) return encryption_config def _get_image( @@ -372,7 +372,9 @@ class Image(BaseObject): "registryId": self.registry_id, } return { - k: v for k, v in response_object.items() if v is not None and v != [None] # type: ignore + k: v + for k, v in response_object.items() + if v is not None and v != [None] # type: ignore } @property @@ -381,7 +383,9 @@ class Image(BaseObject): response_object["imageDigest"] = self.get_image_digest() response_object["imageTag"] = self.image_tag return { - k: v for k, v in response_object.items() if v is not None and v != [None] # type: ignore + k: v + for k, v in response_object.items() + if v is not None and v != [None] # type: ignore } @@ -399,7 +403,9 @@ class ECRBackend(BaseBackend): self.tagger = TaggingService(tag_name="tags") @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" docker_endpoint = { "AcceptanceRequired": False, @@ -757,7 +763,6 @@ class ECRBackend(BaseBackend): continue for num, image in enumerate(repository.images): - # Search by matching both digest and tag if "imageDigest" in image_id and "imageTag" in image_id: if ( diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 1d98a4c2b..3e06e3c82 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -99,9 +99,9 @@ class Cluster(BaseObject, CloudFormationModel): response_object["clusterArn"] = self.arn response_object["clusterName"] = self.name response_object["capacityProviders"] = self.capacity_providers - response_object[ - "defaultCapacityProviderStrategy" - ] = self.default_capacity_provider_strategy + response_object["defaultCapacityProviderStrategy"] = ( + self.default_capacity_provider_strategy + ) del response_object["arn"], response_object["name"] return response_object @@ -484,9 +484,9 @@ class CapacityProvider(BaseObject): def update(self, asg_details: Dict[str, Any]) -> None: if "managedTerminationProtection" in asg_details: - self.auto_scaling_group_provider[ - "managedTerminationProtection" - ] = asg_details["managedTerminationProtection"] + self.auto_scaling_group_provider["managedTerminationProtection"] = ( + asg_details["managedTerminationProtection"] + ) if "managedScaling" in asg_details: scaling_props = [ "status", @@ -497,9 +497,9 @@ class CapacityProvider(BaseObject): ] for prop in scaling_props: if prop in asg_details["managedScaling"]: - 
self.auto_scaling_group_provider["managedScaling"][ - prop - ] = asg_details["managedScaling"][prop] + self.auto_scaling_group_provider["managedScaling"][prop] = ( + asg_details["managedScaling"][prop] + ) self.auto_scaling_group_provider = self._prepare_asg_provider( self.auto_scaling_group_provider ) @@ -963,7 +963,9 @@ class EC2ContainerServiceBackend(BaseBackend): self.container_instances: Dict[str, Dict[str, ContainerInstance]] = {} @staticmethod - def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]: # type: ignore[misc] + def default_vpc_endpoint_service( + service_region: str, zones: List[str] + ) -> List[Dict[str, Any]]: # type: ignore[misc] """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "ecs" @@ -1171,7 +1173,6 @@ class EC2ContainerServiceBackend(BaseBackend): pid_mode: Optional[str] = None, ephemeral_storage: Optional[Dict[str, int]] = None, ) -> TaskDefinition: - if requires_compatibilities and "FARGATE" in requires_compatibilities: # TODO need more validation for Fargate if pid_mode and pid_mode != "task": @@ -1215,7 +1216,11 @@ class EC2ContainerServiceBackend(BaseBackend): return task_definition @staticmethod - def _validate_container_defs(memory: Optional[str], container_definitions: List[Dict[str, Any]], requires_compatibilities: Optional[List[str]]) -> None: # type: ignore[misc] + def _validate_container_defs( + memory: Optional[str], + container_definitions: List[Dict[str, Any]], + requires_compatibilities: Optional[List[str]], + ) -> None: # type: ignore[misc] # The capitalised keys are passed by Cloudformation for cd in container_definitions: if "name" not in cd and "Name" not in cd: @@ -1360,7 +1365,9 @@ class EC2ContainerServiceBackend(BaseBackend): return tasks @staticmethod - def _calculate_task_resource_requirements(task_definition: TaskDefinition) -> Dict[str, Any]: # type: ignore[misc] + def _calculate_task_resource_requirements( + task_definition: TaskDefinition, + ) -> Dict[str, Any]: # type: ignore[misc] resource_requirements: Dict[str, Any] = { "CPU": 0, "MEMORY": 0, @@ -1402,7 +1409,10 @@ class EC2ContainerServiceBackend(BaseBackend): return resource_requirements @staticmethod - def _can_be_placed(container_instance: ContainerInstance, task_resource_requirements: Dict[str, Any]) -> bool: # type: ignore[misc] + def _can_be_placed( + container_instance: ContainerInstance, + task_resource_requirements: Dict[str, Any], + ) -> bool: # type: ignore[misc] """ :param container_instance: The container instance trying to be placed onto @@ -1736,9 +1746,9 @@ class EC2ContainerServiceBackend(BaseBackend): if not self.container_instances.get(cluster_name): self.container_instances[cluster_name] = {} container_instance_id = container_instance.container_instance_arn.split("/")[-1] - self.container_instances[cluster_name][ - container_instance_id - ] = container_instance + self.container_instances[cluster_name][container_instance_id] = ( + container_instance + ) self.clusters[cluster_name].registered_container_instances_count += 1 return container_instance @@ -1861,9 +1871,9 @@ class EC2ContainerServiceBackend(BaseBackend): elif force and container_instance.running_tasks_count > 0: if not self.container_instances.get("orphaned"): self.container_instances["orphaned"] = {} - self.container_instances["orphaned"][ - container_instance_id - ] = container_instance + self.container_instances["orphaned"][container_instance_id] = ( + container_instance + ) del 
self.container_instances[cluster.name][container_instance_id] self._respond_to_cluster_state_update(cluster_str) return container_instance diff --git a/moto/elasticache/exceptions.py b/moto/elasticache/exceptions.py index 703db9749..876406b4e 100644 --- a/moto/elasticache/exceptions.py +++ b/moto/elasticache/exceptions.py @@ -14,7 +14,6 @@ EXCEPTION_RESPONSE = """ class ElastiCacheException(RESTError): - code = 400 extended_templates = {"ecerror": EXCEPTION_RESPONSE} env = RESTError.extended_environment(extended_templates) @@ -25,7 +24,6 @@ class ElastiCacheException(RESTError): class PasswordTooShort(ElastiCacheException): - code = 404 def __init__(self) -> None: @@ -36,7 +34,6 @@ class PasswordTooShort(ElastiCacheException): class PasswordRequired(ElastiCacheException): - code = 404 def __init__(self) -> None: @@ -47,7 +44,6 @@ class PasswordRequired(ElastiCacheException): class UserAlreadyExists(ElastiCacheException): - code = 404 def __init__(self) -> None: @@ -55,7 +51,6 @@ class UserAlreadyExists(ElastiCacheException): class UserNotFound(ElastiCacheException): - code = 404 def __init__(self, user_id: str): @@ -63,18 +58,18 @@ class UserNotFound(ElastiCacheException): class CacheClusterAlreadyExists(ElastiCacheException): - code = 404 def __init__(self, cache_cluster_id: str): - super().__init__( - "CacheClusterAlreadyExists", - message=f"Cache cluster {cache_cluster_id} already exists.", - ), + ( + super().__init__( + "CacheClusterAlreadyExists", + message=f"Cache cluster {cache_cluster_id} already exists.", + ), + ) class CacheClusterNotFound(ElastiCacheException): - code = 404 def __init__(self, cache_cluster_id: str): @@ -85,7 +80,6 @@ class CacheClusterNotFound(ElastiCacheException): class InvalidARNFault(ElastiCacheException): - code = 400 def __init__(self, arn: str): diff --git a/moto/elasticache/urls.py b/moto/elasticache/urls.py index 755d63237..cd0cf1b3c 100644 --- a/moto/elasticache/urls.py +++ b/moto/elasticache/urls.py @@ -1,4 +1,5 @@ """elasticache base URL and path.""" + from .responses import ElastiCacheResponse url_bases = [ diff --git a/moto/elasticbeanstalk/exceptions.py b/moto/elasticbeanstalk/exceptions.py index 619adbaa1..e3a8c2d78 100644 --- a/moto/elasticbeanstalk/exceptions.py +++ b/moto/elasticbeanstalk/exceptions.py @@ -14,7 +14,6 @@ EXCEPTION_RESPONSE = """ class ElasticBeanstalkException(RESTError): - code = 400 extended_templates = {"ecerror": EXCEPTION_RESPONSE} env = RESTError.extended_environment(extended_templates) @@ -35,7 +34,6 @@ class ResourceNotFoundException(RESTError): class ApplicationNotFound(ElasticBeanstalkException): - code = 404 def __init__(self, application_name: str): diff --git a/moto/elb/models.py b/moto/elb/models.py index 0d829fd19..fcf5632bb 100644 --- a/moto/elb/models.py +++ b/moto/elb/models.py @@ -554,9 +554,9 @@ class ELBBackend(BaseBackend): if connection_draining: load_balancer.attributes["connection_draining"] = connection_draining if "timeout" not in connection_draining: - load_balancer.attributes["connection_draining"][ - "timeout" - ] = 300 # default + load_balancer.attributes["connection_draining"]["timeout"] = ( + 300 # default + ) if access_log: load_balancer.attributes["access_log"] = access_log diff --git a/moto/elb/responses.py b/moto/elb/responses.py index f8bf14320..c1bd72d0a 100644 --- a/moto/elb/responses.py +++ b/moto/elb/responses.py @@ -298,7 +298,6 @@ class ELBResponse(BaseResponse): return template.render(instances=instances) def add_tags(self) -> str: - for key, value in self.querystring.items(): if 
"LoadBalancerNames.member" in key: load_balancer_name = value[0] @@ -406,7 +405,8 @@ class ELBResponse(BaseResponse): subnets = params.get("Subnets") all_subnets = self.elb_backend.attach_load_balancer_to_subnets( - load_balancer_name, subnets # type: ignore[arg-type] + load_balancer_name, + subnets, # type: ignore[arg-type] ) template = self.response_template(ATTACH_LB_TO_SUBNETS_TEMPLATE) return template.render(subnets=all_subnets) @@ -417,7 +417,8 @@ class ELBResponse(BaseResponse): subnets = params.get("Subnets") all_subnets = self.elb_backend.detach_load_balancer_from_subnets( - load_balancer_name, subnets # type: ignore[arg-type] + load_balancer_name, + subnets, # type: ignore[arg-type] ) template = self.response_template(DETACH_LB_FROM_SUBNETS_TEMPLATE) return template.render(subnets=all_subnets) diff --git a/moto/elbv2/models.py b/moto/elbv2/models.py index 49b1dcc13..4a52abddd 100644 --- a/moto/elbv2/models.py +++ b/moto/elbv2/models.py @@ -1229,7 +1229,6 @@ Member must satisfy regular expression pattern: {expression}" healthcheck_timeout_seconds is not None and healthcheck_interval_seconds is not None ): - if healthcheck_interval_seconds < healthcheck_timeout_seconds: message = f"Health check timeout '{healthcheck_timeout_seconds}' must be smaller than or equal to the interval '{healthcheck_interval_seconds}'" if protocol in ("HTTP", "HTTPS"): @@ -1475,7 +1474,6 @@ Member must satisfy regular expression pattern: {expression}" target_group_arns: List[str], names: Optional[List[str]], ) -> Iterable[FakeTargetGroup]: - args = sum(bool(arg) for arg in [load_balancer_arn, target_group_arns, names]) if args > 1: diff --git a/moto/emr/models.py b/moto/emr/models.py index e26c03d8f..cc7beb3aa 100644 --- a/moto/emr/models.py +++ b/moto/emr/models.py @@ -139,7 +139,6 @@ class FakeInstanceGroup(CloudFormationModel): region_name: str, **kwargs: Any, ) -> "FakeInstanceGroup": - properties = cloudformation_json["Properties"] job_flow_id = properties["JobFlowId"] ebs_config = properties.get("EbsConfiguration") @@ -455,7 +454,6 @@ class FakeCluster(CloudFormationModel): region_name: str, **kwargs: Any, ) -> "FakeCluster": - properties = cloudformation_json["Properties"] instance_attrs = properties.get("Instances", {}) diff --git a/moto/emr/responses.py b/moto/emr/responses.py index 39f66e9cc..52688e171 100644 --- a/moto/emr/responses.py +++ b/moto/emr/responses.py @@ -24,7 +24,7 @@ def generate_boto3_response( """ def _boto3_request( - method: Callable[["ElasticMapReduceResponse"], str] + method: Callable[["ElasticMapReduceResponse"], str], ) -> Callable[["ElasticMapReduceResponse"], str]: @wraps(method) def f(self: "ElasticMapReduceResponse") -> str: @@ -49,7 +49,6 @@ def generate_boto3_response( class ElasticMapReduceResponse(BaseResponse): - # EMR end points are inconsistent in the placement of region name # in the URL, so parsing it out needs to be handled differently emr_region_regex: List[Pattern[str]] = [ @@ -366,9 +365,9 @@ class ElasticMapReduceResponse(BaseResponse): "KerberosAttributes.CrossRealmTrustPrincipalPassword" ) if cross_realm_principal_password: - kerberos_attributes[ - "CrossRealmTrustPrincipalPassword" - ] = cross_realm_principal_password + kerberos_attributes["CrossRealmTrustPrincipalPassword"] = ( + cross_realm_principal_password + ) ad_domain_join_user = self._get_param("KerberosAttributes.ADDomainJoinUser") if ad_domain_join_user: @@ -472,9 +471,9 @@ class ElasticMapReduceResponse(BaseResponse): ) if vol_type in ebs_configuration: instance_group.pop(vol_type) - 
ebs_block[volume_specification][ - volume_type - ] = ebs_configuration.pop(vol_type) + ebs_block[volume_specification][volume_type] = ( + ebs_configuration.pop(vol_type) + ) per_instance = f"{key}._{volumes_per_instance}" if per_instance in ebs_configuration: diff --git a/moto/emr/utils.py b/moto/emr/utils.py index 53f28b7ac..1beec2969 100644 --- a/moto/emr/utils.py +++ b/moto/emr/utils.py @@ -28,7 +28,7 @@ def random_instance_group_id() -> str: def steps_from_query_string( - querystring_dict: List[Dict[str, Any]] + querystring_dict: List[Dict[str, Any]], ) -> List[Dict[str, Any]]: steps = [] for step in querystring_dict: @@ -455,7 +455,9 @@ class EmrSecurityGroupManager: pass @staticmethod - def _render_rules(rules: Any, managed_groups: Dict[str, Any]) -> List[Dict[str, Any]]: # type: ignore[misc] + def _render_rules( + rules: Any, managed_groups: Dict[str, Any] + ) -> List[Dict[str, Any]]: # type: ignore[misc] rendered_rules = copy.deepcopy(rules) for rule in rendered_rules: rule["group_name_or_id"] = managed_groups[rule["group_name_or_id"]].id diff --git a/moto/emrcontainers/exceptions.py b/moto/emrcontainers/exceptions.py index 492fff856..37ec4f068 100644 --- a/moto/emrcontainers/exceptions.py +++ b/moto/emrcontainers/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the emrcontainers service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/emrcontainers/models.py b/moto/emrcontainers/models.py index 653d8de04..aa3f6876e 100644 --- a/moto/emrcontainers/models.py +++ b/moto/emrcontainers/models.py @@ -1,4 +1,5 @@ """EMRContainersBackend class with methods for supported APIs.""" + import re from datetime import datetime from typing import Any, Dict, Iterator, List, Optional, Tuple @@ -274,7 +275,6 @@ class EMRContainersBackend(BaseBackend): configuration_overrides: Dict[str, Any], tags: Dict[str, str], ) -> FakeJob: - if virtual_cluster_id not in self.virtual_clusters.keys(): raise ResourceNotFoundException( f"Virtual cluster {virtual_cluster_id} doesn't exist." 
@@ -304,7 +304,6 @@ class EMRContainersBackend(BaseBackend): return job def cancel_job_run(self, job_id: str, virtual_cluster_id: str) -> FakeJob: - if not re.match(r"[a-z,A-Z,0-9]{19}", job_id): raise ValidationException("Invalid job run short id") diff --git a/moto/emrcontainers/responses.py b/moto/emrcontainers/responses.py index 43eebaf99..7d4388a6c 100644 --- a/moto/emrcontainers/responses.py +++ b/moto/emrcontainers/responses.py @@ -1,4 +1,5 @@ """Handles incoming emrcontainers requests, invokes methods, returns responses.""" + import json from moto.core.common_types import TYPE_RESPONSE diff --git a/moto/emrcontainers/urls.py b/moto/emrcontainers/urls.py index ce5d974d8..b368b5b89 100644 --- a/moto/emrcontainers/urls.py +++ b/moto/emrcontainers/urls.py @@ -1,4 +1,5 @@ """emrcontainers base URL and path.""" + from .responses import EMRContainersResponse url_bases = [ diff --git a/moto/emrserverless/exceptions.py b/moto/emrserverless/exceptions.py index a91d44f71..de7c7b924 100644 --- a/moto/emrserverless/exceptions.py +++ b/moto/emrserverless/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the emrserverless service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/emrserverless/models.py b/moto/emrserverless/models.py index ca753564e..af5212210 100644 --- a/moto/emrserverless/models.py +++ b/moto/emrserverless/models.py @@ -1,4 +1,5 @@ """EMRServerlessBackend class with methods for supported APIs.""" + import inspect import re from datetime import datetime @@ -74,8 +75,14 @@ class FakeApplication(BaseModel): yield "applicationId", self.id yield "name", self.name yield "arn", self.arn - yield "autoStartConfig", self.auto_start_configuration, - yield "autoStopConfig", self.auto_stop_configuration, + yield ( + "autoStartConfig", + self.auto_start_configuration, + ) + yield ( + "autoStopConfig", + self.auto_stop_configuration, + ) def to_dict(self) -> Dict[str, Any]: """ @@ -146,7 +153,6 @@ class EMRServerlessBackend(BaseBackend): auto_stop_configuration: str, network_configuration: str, ) -> FakeApplication: - if application_type not in ["HIVE", "SPARK"]: raise ValidationException(f"Unsupported engine {application_type}") diff --git a/moto/emrserverless/responses.py b/moto/emrserverless/responses.py index e36930517..b83fc121e 100644 --- a/moto/emrserverless/responses.py +++ b/moto/emrserverless/responses.py @@ -1,4 +1,5 @@ """Handles incoming emrserverless requests, invokes methods, returns responses.""" + import json from moto.core.common_types import TYPE_RESPONSE diff --git a/moto/emrserverless/urls.py b/moto/emrserverless/urls.py index 06a037fed..53ee9d7f1 100644 --- a/moto/emrserverless/urls.py +++ b/moto/emrserverless/urls.py @@ -1,4 +1,5 @@ """emrserverless base URL and path.""" + from .responses import EMRServerlessResponse url_bases = [ diff --git a/moto/es/exceptions.py b/moto/es/exceptions.py index 23c298b08..dfffe59db 100644 --- a/moto/es/exceptions.py +++ b/moto/es/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the ElasticSearch service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/events/models.py b/moto/events/models.py index 9c2a58be6..10f0904e0 100644 --- a/moto/events/models.py +++ b/moto/events/models.py @@ -738,7 +738,8 @@ class Replay(BaseModel): for rule in event_bus.rules.values(): rule.send_to_targets( dict( - event, **{"id": str(random.uuid4()), "replay-name": self.name} # type: ignore + event, + **{"id": str(random.uuid4()), "replay-name": self.name}, # type: ignore ), ) @@ -1397,7 +1398,9 @@ class 
EventsBackend(BaseBackend): ) @staticmethod - def _condition_param_to_stmt_condition(condition: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: # type: ignore[misc] + def _condition_param_to_stmt_condition( + condition: Optional[Dict[str, Any]], + ) -> Optional[Dict[str, Any]]: # type: ignore[misc] if condition: key = condition["Key"] value = condition["Value"] diff --git a/moto/firehose/exceptions.py b/moto/firehose/exceptions.py index 381c80899..3f3116c90 100644 --- a/moto/firehose/exceptions.py +++ b/moto/firehose/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the Firehose service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/firehose/models.py b/moto/firehose/models.py index e8c35a85a..4576a6d36 100644 --- a/moto/firehose/models.py +++ b/moto/firehose/models.py @@ -11,6 +11,7 @@ Incomplete list of unfinished items: are reported back to the user. Instead an exception is raised. - put_record(), put_record_batch() always set "Encrypted" to False. """ + import io import json import warnings @@ -83,7 +84,7 @@ def find_destination_config_in_args(api_args: Dict[str, Any]) -> Tuple[str, Any] def create_s3_destination_config( - extended_s3_destination_config: Dict[str, Any] + extended_s3_destination_config: Dict[str, Any], ) -> Dict[str, Any]: """Return dict with selected fields copied from ExtendedS3 config. @@ -105,9 +106,7 @@ def create_s3_destination_config( return destination -class DeliveryStream( - BaseModel -): # pylint: disable=too-few-public-methods,too-many-instance-attributes +class DeliveryStream(BaseModel): # pylint: disable=too-few-public-methods,too-many-instance-attributes """Represents a delivery stream, its source and destination configs.""" STATES = {"CREATING", "ACTIVE", "CREATING_FAILED"} @@ -414,7 +413,9 @@ class FirehoseBackend(BaseBackend): } @staticmethod - def put_http_records(http_destination: Dict[str, Any], records: List[Dict[str, bytes]]) -> List[Dict[str, str]]: # type: ignore[misc] + def put_http_records( + http_destination: Dict[str, Any], records: List[Dict[str, bytes]] + ) -> List[Dict[str, str]]: # type: ignore[misc] """Put records to a HTTP destination.""" # Mostly copied from localstack url = http_destination["EndpointConfiguration"]["Url"] @@ -662,9 +663,9 @@ class FirehoseBackend(BaseBackend): # to be updated as well. The problem is that they don't have the # same fields. 
if dest_name == "ExtendedS3": - delivery_stream.destinations[destination_idx][ - "S3" - ] = create_s3_destination_config(dest_config) + delivery_stream.destinations[destination_idx]["S3"] = ( + create_s3_destination_config(dest_config) + ) elif dest_name == "S3" and "ExtendedS3" in destination: destination["ExtendedS3"] = { k: v diff --git a/moto/firehose/responses.py b/moto/firehose/responses.py index 096830128..0a1314634 100644 --- a/moto/firehose/responses.py +++ b/moto/firehose/responses.py @@ -1,4 +1,5 @@ """Handles Firehose API requests, invokes method and returns response.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/firehose/urls.py b/moto/firehose/urls.py index 8d332099b..3e51ad841 100644 --- a/moto/firehose/urls.py +++ b/moto/firehose/urls.py @@ -1,4 +1,5 @@ """Firehose base URL and path.""" + from .responses import FirehoseResponse url_bases = [r"https?://firehose\.(.+)\.amazonaws\.com"] diff --git a/moto/glacier/responses.py b/moto/glacier/responses.py index 85b4fb15c..47e5f969b 100644 --- a/moto/glacier/responses.py +++ b/moto/glacier/responses.py @@ -37,7 +37,9 @@ class GlacierResponse(BaseResponse): self.setup_class(request, full_url, headers) return self._vault_response(request, full_url, headers) - def _vault_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def _vault_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] method = request.method vault_name = vault_from_glacier_url(full_url) @@ -101,7 +103,9 @@ class GlacierResponse(BaseResponse): self.setup_class(request, full_url, headers) return self._vault_archive_individual_response(request, full_url, headers) - def _vault_archive_individual_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def _vault_archive_individual_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] method = request.method vault_name = full_url.split("/")[-3] archive_id = full_url.split("/")[-1] @@ -117,7 +121,9 @@ class GlacierResponse(BaseResponse): self.setup_class(request, full_url, headers) return self._vault_jobs_response(request, full_url, headers) - def _vault_jobs_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def _vault_jobs_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] method = request.method if hasattr(request, "body"): body = request.body diff --git a/moto/glue/glue_schema_registry_utils.py b/moto/glue/glue_schema_registry_utils.py index 69bbdde61..05ec78764 100644 --- a/moto/glue/glue_schema_registry_utils.py +++ b/moto/glue/glue_schema_registry_utils.py @@ -168,7 +168,6 @@ def validate_registry_id( param_value=registry_name, ) if registry_id.get(REGISTRY_ARN): - raise RegistryNotFoundException( resource="Registry", param_name=REGISTRY_ARN, @@ -364,7 +363,7 @@ def validate_schema_version_number( def validate_schema_version_metadata_pattern_and_length( - metadata_key_value: Dict[str, str] + metadata_key_value: Dict[str, str], ) -> Tuple[str, str]: metadata_key = metadata_key_value.get(METADATA_KEY) metadata_value = metadata_key_value.get(METADATA_VALUE) @@ -376,7 +375,7 @@ def validate_schema_version_metadata_pattern_and_length( def validate_number_of_schema_version_metadata_allowed( - metadata: Dict[str, Any] + metadata: Dict[str, Any], ) -> None: num_metadata_key_value_pairs = 0 for m 
in metadata.values(): diff --git a/moto/glue/responses.py b/moto/glue/responses.py index 78a23a0b8..6c9aa1514 100644 --- a/moto/glue/responses.py +++ b/moto/glue/responses.py @@ -31,7 +31,9 @@ class GlueResponse(BaseResponse): database_name = database_input.get("Name") # type: ignore if "CatalogId" in self.parameters: database_input["CatalogId"] = self.parameters.get("CatalogId") # type: ignore - self.glue_backend.create_database(database_name, database_input, self.parameters.get("Tags")) # type: ignore[arg-type] + self.glue_backend.create_database( + database_name, database_input, self.parameters.get("Tags") + ) # type: ignore[arg-type] return "" def get_database(self) -> str: @@ -134,7 +136,9 @@ class GlueResponse(BaseResponse): table_name = self.parameters.get("TableName") expression = self.parameters.get("Expression") partitions = self.glue_backend.get_partitions( - database_name, table_name, expression # type: ignore[arg-type] + database_name, + table_name, + expression, # type: ignore[arg-type] ) return json.dumps({"Partitions": [p.as_dict() for p in partitions]}) @@ -154,7 +158,9 @@ class GlueResponse(BaseResponse): partitions_to_get = self.parameters.get("PartitionsToGet") partitions = self.glue_backend.batch_get_partition( - database_name, table_name, partitions_to_get # type: ignore[arg-type] + database_name, + table_name, + partitions_to_get, # type: ignore[arg-type] ) return json.dumps({"Partitions": partitions}) @@ -172,7 +178,9 @@ class GlueResponse(BaseResponse): table_name = self.parameters.get("TableName") partition_input = self.parameters.get("PartitionInputList") errors_output = self.glue_backend.batch_create_partition( - database_name, table_name, partition_input # type: ignore[arg-type] + database_name, + table_name, + partition_input, # type: ignore[arg-type] ) out = {} @@ -188,7 +196,10 @@ class GlueResponse(BaseResponse): part_to_update = self.parameters.get("PartitionValueList") self.glue_backend.update_partition( - database_name, table_name, part_input, part_to_update # type: ignore[arg-type] + database_name, + table_name, + part_input, + part_to_update, # type: ignore[arg-type] ) return "" @@ -198,7 +209,9 @@ class GlueResponse(BaseResponse): entries = self.parameters.get("Entries") errors_output = self.glue_backend.batch_update_partition( - database_name, table_name, entries # type: ignore[arg-type] + database_name, + table_name, + entries, # type: ignore[arg-type] ) out = {} @@ -221,7 +234,9 @@ class GlueResponse(BaseResponse): parts = self.parameters.get("PartitionsToDelete") errors_output = self.glue_backend.batch_delete_partition( - database_name, table_name, parts # type: ignore[arg-type] + database_name, + table_name, + parts, # type: ignore[arg-type] ) out = {} diff --git a/moto/greengrass/models.py b/moto/greengrass/models.py index 1910c652e..b6f6b8110 100644 --- a/moto/greengrass/models.py +++ b/moto/greengrass/models.py @@ -383,30 +383,29 @@ class FakeGroupVersion(BaseModel): self.subscription_definition_version_arn = subscription_definition_version_arn def to_dict(self, include_detail: bool = False) -> Dict[str, Any]: - definition = {} if self.core_definition_version_arn: definition["CoreDefinitionVersionArn"] = self.core_definition_version_arn if self.device_definition_version_arn: - definition[ - "DeviceDefinitionVersionArn" - ] = self.device_definition_version_arn + definition["DeviceDefinitionVersionArn"] = ( + self.device_definition_version_arn + ) if self.function_definition_version_arn: - definition[ - "FunctionDefinitionVersionArn" - ] = 
self.function_definition_version_arn + definition["FunctionDefinitionVersionArn"] = ( + self.function_definition_version_arn + ) if self.resource_definition_version_arn: - definition[ - "ResourceDefinitionVersionArn" - ] = self.resource_definition_version_arn + definition["ResourceDefinitionVersionArn"] = ( + self.resource_definition_version_arn + ) if self.subscription_definition_version_arn: - definition[ - "SubscriptionDefinitionVersionArn" - ] = self.subscription_definition_version_arn + definition["SubscriptionDefinitionVersionArn"] = ( + self.subscription_definition_version_arn + ) obj: Dict[str, Any] = { "Arn": self.arn, @@ -461,7 +460,6 @@ class FakeAssociatedRole(BaseModel): self.associated_at = utcnow() def to_dict(self, include_detail: bool = False) -> Dict[str, Any]: - obj = {"AssociatedAt": iso_8601_datetime_with_milliseconds(self.associated_at)} if include_detail: obj["RoleArn"] = self.role_arn @@ -510,9 +508,9 @@ class GreengrassBackend(BaseBackend): self.resource_definition_versions: Dict[ str, Dict[str, FakeResourceDefinitionVersion] ] = OrderedDict() - self.subscription_definitions: Dict[ - str, FakeSubscriptionDefinition - ] = OrderedDict() + self.subscription_definitions: Dict[str, FakeSubscriptionDefinition] = ( + OrderedDict() + ) self.subscription_definition_versions: Dict[ str, Dict[str, FakeSubscriptionDefinitionVersion] ] = OrderedDict() @@ -521,7 +519,6 @@ class GreengrassBackend(BaseBackend): def create_core_definition( self, name: str, initial_version: Dict[str, Any] ) -> FakeCoreDefinition: - core_definition = FakeCoreDefinition(self.account_id, self.region_name, name) self.core_definitions[core_definition.id] = core_definition self.create_core_definition_version( @@ -533,7 +530,6 @@ class GreengrassBackend(BaseBackend): return self.core_definitions.values() def get_core_definition(self, core_definition_id: str) -> FakeCoreDefinition: - if core_definition_id not in self.core_definitions: raise IdNotFoundException("That Core List Definition does not exist") return self.core_definitions[core_definition_id] @@ -545,7 +541,6 @@ class GreengrassBackend(BaseBackend): del self.core_definition_versions[core_definition_id] def update_core_definition(self, core_definition_id: str, name: str) -> None: - if name == "": raise InvalidContainerDefinitionException( "Input does not contain any attributes to be updated" @@ -557,7 +552,6 @@ class GreengrassBackend(BaseBackend): def create_core_definition_version( self, core_definition_id: str, cores: List[Dict[str, Any]] ) -> FakeCoreDefinitionVersion: - definition = {"Cores": cores} core_def_ver = FakeCoreDefinitionVersion( self.account_id, self.region_name, core_definition_id, definition @@ -576,7 +570,6 @@ class GreengrassBackend(BaseBackend): def list_core_definition_versions( self, core_definition_id: str ) -> Iterable[FakeCoreDefinitionVersion]: - if core_definition_id not in self.core_definitions: raise IdNotFoundException("That cores definition does not exist.") return self.core_definition_versions[core_definition_id].values() @@ -584,7 +577,6 @@ class GreengrassBackend(BaseBackend): def get_core_definition_version( self, core_definition_id: str, core_definition_version_id: str ) -> FakeCoreDefinitionVersion: - if core_definition_id not in self.core_definitions: raise IdNotFoundException("That cores definition does not exist.") @@ -619,7 +611,6 @@ class GreengrassBackend(BaseBackend): def create_device_definition_version( self, device_definition_id: str, devices: List[Dict[str, Any]] ) -> FakeDeviceDefinitionVersion: - 
if device_definition_id not in self.device_definitions: raise IdNotFoundException("That devices definition does not exist.") @@ -643,13 +634,11 @@ class GreengrassBackend(BaseBackend): def list_device_definition_versions( self, device_definition_id: str ) -> Iterable[FakeDeviceDefinitionVersion]: - if device_definition_id not in self.device_definitions: raise IdNotFoundException("That devices definition does not exist.") return self.device_definition_versions[device_definition_id].values() def get_device_definition(self, device_definition_id: str) -> FakeDeviceDefinition: - if device_definition_id not in self.device_definitions: raise IdNotFoundException("That Device List Definition does not exist.") return self.device_definitions[device_definition_id] @@ -661,7 +650,6 @@ class GreengrassBackend(BaseBackend): del self.device_definition_versions[device_definition_id] def update_device_definition(self, device_definition_id: str, name: str) -> None: - if name == "": raise InvalidContainerDefinitionException( "Input does not contain any attributes to be updated" @@ -673,7 +661,6 @@ class GreengrassBackend(BaseBackend): def get_device_definition_version( self, device_definition_id: str, device_definition_version_id: str ) -> FakeDeviceDefinitionVersion: - if device_definition_id not in self.device_definitions: raise IdNotFoundException("That devices definition does not exist.") @@ -692,7 +679,6 @@ class GreengrassBackend(BaseBackend): def create_resource_definition( self, name: str, initial_version: Dict[str, Any] ) -> FakeResourceDefinition: - resources = initial_version.get("Resources", []) GreengrassBackend._validate_resources(resources) @@ -712,7 +698,6 @@ class GreengrassBackend(BaseBackend): def get_resource_definition( self, resource_definition_id: str ) -> FakeResourceDefinition: - if resource_definition_id not in self.resource_definitions: raise IdNotFoundException("That Resource List Definition does not exist.") return self.resource_definitions[resource_definition_id] @@ -726,7 +711,6 @@ class GreengrassBackend(BaseBackend): def update_resource_definition( self, resource_definition_id: str, name: str ) -> None: - if name == "": raise InvalidInputException("Invalid resource name.") if resource_definition_id not in self.resource_definitions: @@ -736,7 +720,6 @@ class GreengrassBackend(BaseBackend): def create_resource_definition_version( self, resource_definition_id: str, resources: List[Dict[str, Any]] ) -> FakeResourceDefinitionVersion: - if resource_definition_id not in self.resource_definitions: raise IdNotFoundException("That resource definition does not exist.") @@ -750,9 +733,9 @@ class GreengrassBackend(BaseBackend): resource_def_ver.resource_definition_id, {} ) resources_ver[resource_def_ver.version] = resource_def_ver - self.resource_definition_versions[ - resource_def_ver.resource_definition_id - ] = resources_ver + self.resource_definition_versions[resource_def_ver.resource_definition_id] = ( + resources_ver + ) self.resource_definitions[ resource_definition_id @@ -767,7 +750,6 @@ class GreengrassBackend(BaseBackend): def list_resource_definition_versions( self, resource_definition_id: str ) -> Iterable[FakeResourceDefinitionVersion]: - if resource_definition_id not in self.resource_definition_versions: raise IdNotFoundException("That resources definition does not exist.") @@ -776,7 +758,6 @@ class GreengrassBackend(BaseBackend): def get_resource_definition_version( self, resource_definition_id: str, resource_definition_version_id: str ) -> FakeResourceDefinitionVersion: - 
if resource_definition_id not in self.resource_definition_versions: raise IdNotFoundException("That resources definition does not exist.") @@ -839,7 +820,6 @@ class GreengrassBackend(BaseBackend): def get_function_definition( self, function_definition_id: str ) -> FakeFunctionDefinition: - if function_definition_id not in self.function_definitions: raise IdNotFoundException("That Lambda List Definition does not exist.") return self.function_definitions[function_definition_id] @@ -853,7 +833,6 @@ class GreengrassBackend(BaseBackend): def update_function_definition( self, function_definition_id: str, name: str ) -> None: - if name == "": raise InvalidContainerDefinitionException( "Input does not contain any attributes to be updated" @@ -868,7 +847,6 @@ class GreengrassBackend(BaseBackend): functions: List[Dict[str, Any]], default_config: Dict[str, Any], ) -> FakeFunctionDefinitionVersion: - if function_definition_id not in self.function_definitions: raise IdNotFoundException("That lambdas does not exist.") @@ -903,7 +881,6 @@ class GreengrassBackend(BaseBackend): def get_function_definition_version( self, function_definition_id: str, function_definition_version_id: str ) -> FakeFunctionDefinitionVersion: - if function_definition_id not in self.function_definition_versions: raise IdNotFoundException("That lambdas definition does not exist.") @@ -921,7 +898,6 @@ class GreengrassBackend(BaseBackend): @staticmethod def _is_valid_subscription_target_or_source(target_or_source: str) -> bool: - if target_or_source in ["cloud", "GGShadowService"]: return True @@ -940,8 +916,9 @@ class GreengrassBackend(BaseBackend): return False @staticmethod - def _validate_subscription_target_or_source(subscriptions: List[Dict[str, Any]]) -> None: # type: ignore[misc] - + def _validate_subscription_target_or_source( + subscriptions: List[Dict[str, Any]], + ) -> None: # type: ignore[misc] target_errors: List[str] = [] source_errors: List[str] = [] @@ -977,7 +954,6 @@ class GreengrassBackend(BaseBackend): def create_subscription_definition( self, name: str, initial_version: Dict[str, Any] ) -> FakeSubscriptionDefinition: - GreengrassBackend._validate_subscription_target_or_source( initial_version["Subscriptions"] ) @@ -1002,7 +978,6 @@ class GreengrassBackend(BaseBackend): def get_subscription_definition( self, subscription_definition_id: str ) -> FakeSubscriptionDefinition: - if subscription_definition_id not in self.subscription_definitions: raise IdNotFoundException( "That Subscription List Definition does not exist." 
@@ -1018,7 +993,6 @@ class GreengrassBackend(BaseBackend): def update_subscription_definition( self, subscription_definition_id: str, name: str ) -> None: - if name == "": raise InvalidContainerDefinitionException( "Input does not contain any attributes to be updated" @@ -1030,7 +1004,6 @@ class GreengrassBackend(BaseBackend): def create_subscription_definition_version( self, subscription_definition_id: str, subscriptions: List[Dict[str, Any]] ) -> FakeSubscriptionDefinitionVersion: - GreengrassBackend._validate_subscription_target_or_source(subscriptions) if subscription_definition_id not in self.subscription_definitions: @@ -1058,7 +1031,6 @@ class GreengrassBackend(BaseBackend): def get_subscription_definition_version( self, subscription_definition_id: str, subscription_definition_version_id: str ) -> FakeSubscriptionDefinitionVersion: - if subscription_definition_id not in self.subscription_definitions: raise IdNotFoundException("That subscriptions definition does not exist.") @@ -1118,7 +1090,6 @@ class GreengrassBackend(BaseBackend): del self.group_versions[group_id] def update_group(self, group_id: str, name: str) -> None: - if name == "": raise InvalidContainerDefinitionException( "Input does not contain any attributes to be updated" @@ -1136,7 +1107,6 @@ class GreengrassBackend(BaseBackend): resource_definition_version_arn: Optional[str], subscription_definition_version_arn: Optional[str], ) -> FakeGroupVersion: - if group_id not in self.groups: raise IdNotFoundException("That group does not exist.") @@ -1176,7 +1146,6 @@ class GreengrassBackend(BaseBackend): def _is_valid_def_ver_arn( definition_version_arn: Optional[str], kind: str = "cores" ) -> bool: - if definition_version_arn is None: return True @@ -1253,7 +1222,6 @@ class GreengrassBackend(BaseBackend): def get_group_version( self, group_id: str, group_version_id: str ) -> FakeGroupVersion: - if group_id not in self.group_versions: raise IdNotFoundException("That group definition does not exist.") @@ -1271,7 +1239,6 @@ class GreengrassBackend(BaseBackend): deployment_type: str, deployment_id: Optional[str] = None, ) -> FakeDeployment: - deployment_types = ( "NewDeployment", "Redeployment", @@ -1304,7 +1271,6 @@ class GreengrassBackend(BaseBackend): self.group_versions[group_id][group_version_id].core_definition_version_arn is None ): - err = { "ErrorDetails": [ { @@ -1327,7 +1293,6 @@ class GreengrassBackend(BaseBackend): return deployment def list_deployments(self, group_id: str) -> List[FakeDeployment]: - # ListDeployments API does not check specified group is exists return [ deployment @@ -1338,7 +1303,6 @@ class GreengrassBackend(BaseBackend): def get_deployment_status( self, group_id: str, deployment_id: str ) -> FakeDeploymentStatus: - if deployment_id not in self.deployments: raise InvalidInputException(f"Deployment '{deployment_id}' does not exist.") @@ -1354,7 +1318,6 @@ class GreengrassBackend(BaseBackend): ) def reset_deployments(self, group_id: str, force: bool = False) -> FakeDeployment: - if group_id not in self.groups: raise ResourceNotFoundException("That Group Definition does not exist.") @@ -1380,7 +1343,6 @@ class GreengrassBackend(BaseBackend): def associate_role_to_group( self, group_id: str, role_arn: str ) -> FakeAssociatedRole: - # I don't know why, AssociateRoleToGroup does not check specified group is exists # So, this API allows any group id such as "a" @@ -1389,7 +1351,6 @@ class GreengrassBackend(BaseBackend): return associated_role def get_associated_role(self, group_id: str) -> 
FakeAssociatedRole: - if group_id not in self.group_role_associations: raise GreengrassClientError( "404", "You need to attach an IAM role to this deployment group." diff --git a/moto/greengrass/responses.py b/moto/greengrass/responses.py index 9db3e5ddd..1b4de974f 100644 --- a/moto/greengrass/responses.py +++ b/moto/greengrass/responses.py @@ -16,7 +16,9 @@ class GreengrassResponse(BaseResponse): def greengrass_backend(self) -> GreengrassBackend: return greengrass_backends[self.current_account][self.region] - def core_definitions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def core_definitions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -43,7 +45,9 @@ class GreengrassResponse(BaseResponse): ) return 201, {"status": 201}, json.dumps(res.to_dict()) - def core_definition(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def core_definition( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -77,7 +81,9 @@ class GreengrassResponse(BaseResponse): ) return 200, {"status": 200}, json.dumps({}) - def core_definition_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def core_definition_versions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -104,7 +110,9 @@ class GreengrassResponse(BaseResponse): json.dumps({"Versions": [core_def_ver.to_dict() for core_def_ver in res]}), ) - def core_definition_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def core_definition_version( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -119,7 +127,9 @@ class GreengrassResponse(BaseResponse): ) return 200, {"status": 200}, json.dumps(res.to_dict(include_detail=True)) - def device_definitions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def device_definitions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": @@ -129,7 +139,6 @@ class GreengrassResponse(BaseResponse): return self.list_device_definition() def create_device_definition(self) -> TYPE_RESPONSE: - name = self._get_param("Name") initial_version = self._get_param("InitialVersion") res = self.greengrass_backend.create_device_definition( @@ -151,7 +160,9 @@ class GreengrassResponse(BaseResponse): ), ) - def device_definition_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def device_definition_versions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": @@ -161,7 +172,6 @@ class GreengrassResponse(BaseResponse): return self.list_device_definition_versions() def create_device_definition_version(self) -> TYPE_RESPONSE: - device_definition_id = self.path.split("/")[-2] devices = self._get_param("Devices") @@ -171,7 +181,6 @@ class GreengrassResponse(BaseResponse): return 201, 
{"status": 201}, json.dumps(res.to_dict()) def list_device_definition_versions(self) -> TYPE_RESPONSE: - device_definition_id = self.path.split("/")[-2] res = self.greengrass_backend.list_device_definition_versions( device_definition_id @@ -184,7 +193,9 @@ class GreengrassResponse(BaseResponse): ), ) - def device_definition(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def device_definition( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -204,7 +215,6 @@ class GreengrassResponse(BaseResponse): return 200, {"status": 200}, json.dumps(res.to_dict()) def delete_device_definition(self) -> TYPE_RESPONSE: - device_definition_id = self.path.split("/")[-1] self.greengrass_backend.delete_device_definition( device_definition_id=device_definition_id @@ -212,7 +222,6 @@ class GreengrassResponse(BaseResponse): return 200, {"status": 200}, json.dumps({}) def update_device_definition(self) -> TYPE_RESPONSE: - device_definition_id = self.path.split("/")[-1] name = self._get_param("Name") self.greengrass_backend.update_device_definition( @@ -220,7 +229,9 @@ class GreengrassResponse(BaseResponse): ) return 200, {"status": 200}, json.dumps({}) - def device_definition_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def device_definition_version( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -235,7 +246,9 @@ class GreengrassResponse(BaseResponse): ) return 200, {"status": 200}, json.dumps(res.to_dict(include_detail=True)) - def resource_definitions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def resource_definitions( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "POST": @@ -245,7 +258,6 @@ class GreengrassResponse(BaseResponse): return self.list_resource_definitions() def create_resource_definition(self) -> TYPE_RESPONSE: - initial_version = self._get_param("InitialVersion") name = self._get_param("Name") res = self.greengrass_backend.create_resource_definition( @@ -254,7 +266,6 @@ class GreengrassResponse(BaseResponse): return 201, {"status": 201}, json.dumps(res.to_dict()) def list_resource_definitions(self) -> TYPE_RESPONSE: - res = self.greengrass_backend.list_resource_definitions() return ( 200, @@ -262,7 +273,9 @@ class GreengrassResponse(BaseResponse): json.dumps({"Definitions": [i.to_dict() for i in res]}), ) - def resource_definition(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def resource_definition( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if self.method == "GET": @@ -282,7 +295,6 @@ class GreengrassResponse(BaseResponse): return 200, {"status": 200}, json.dumps(res.to_dict()) def delete_resource_definition(self) -> TYPE_RESPONSE: - resource_definition_id = self.path.split("/")[-1] self.greengrass_backend.delete_resource_definition( resource_definition_id=resource_definition_id @@ -290,7 +302,6 @@ class GreengrassResponse(BaseResponse): return 200, {"status": 200}, json.dumps({}) def update_resource_definition(self) -> TYPE_RESPONSE: - resource_definition_id = 
self.path.split("/")[-1]
         name = self._get_param("Name")
         self.greengrass_backend.update_resource_definition(
@@ -298,7 +309,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps({})
-    def resource_definition_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def resource_definition_versions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -308,7 +321,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_resource_definition_versions()
     def create_resource_definition_version(self) -> TYPE_RESPONSE:
-
         resource_definition_id = self.path.split("/")[-2]
         resources = self._get_param("Resources")
@@ -318,7 +330,6 @@ class GreengrassResponse(BaseResponse):
         return 201, {"status": 201}, json.dumps(res.to_dict())
     def list_resource_definition_versions(self) -> TYPE_RESPONSE:
-
         resource_device_definition_id = self.path.split("/")[-2]
         res = self.greengrass_backend.list_resource_definition_versions(
             resource_device_definition_id
@@ -332,7 +343,9 @@ class GreengrassResponse(BaseResponse):
             ),
         )
-    def resource_definition_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def resource_definition_version(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -347,7 +360,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps(res.to_dict())
-    def function_definitions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def function_definitions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -357,7 +372,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_function_definitions()
     def create_function_definition(self) -> TYPE_RESPONSE:
-
         initial_version = self._get_param("InitialVersion")
         name = self._get_param("Name")
         res = self.greengrass_backend.create_function_definition(
@@ -375,7 +389,9 @@ class GreengrassResponse(BaseResponse):
             ),
         )
-    def function_definition(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def function_definition(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -409,7 +425,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps({})
-    def function_definition_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def function_definition_versions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -419,7 +437,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_function_definition_versions()
     def create_function_definition_version(self) -> TYPE_RESPONSE:
-
         default_config = self._get_param("DefaultConfig")
         function_definition_id = self.path.split("/")[-2]
         functions = self._get_param("Functions")
@@ -439,7 +456,9 @@ class GreengrassResponse(BaseResponse):
         versions = [i.to_dict() for i in res.values()]
         return 200, {"status": 200}, json.dumps({"Versions": versions})
-    def function_definition_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def function_definition_version(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -454,7 +473,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps(res.to_dict())
-    def subscription_definitions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def subscription_definitions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -464,7 +485,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_subscription_definitions()
     def create_subscription_definition(self) -> TYPE_RESPONSE:
-
         initial_version = self._get_param("InitialVersion")
         name = self._get_param("Name")
         res = self.greengrass_backend.create_subscription_definition(
@@ -473,7 +493,6 @@ class GreengrassResponse(BaseResponse):
         return 201, {"status": 201}, json.dumps(res.to_dict())
     def list_subscription_definitions(self) -> TYPE_RESPONSE:
-
         res = self.greengrass_backend.list_subscription_definitions()
         return (
             200,
@@ -488,7 +507,9 @@ class GreengrassResponse(BaseResponse):
             ),
         )
-    def subscription_definition(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def subscription_definition(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -522,7 +543,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps({})
-    def subscription_definition_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def subscription_definition_versions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -532,7 +555,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_subscription_definition_versions()
     def create_subscription_definition_version(self) -> TYPE_RESPONSE:
-
         subscription_definition_id = self.path.split("/")[-2]
         subscriptions = self._get_param("Subscriptions")
         res = self.greengrass_backend.create_subscription_definition_version(
@@ -549,7 +571,9 @@ class GreengrassResponse(BaseResponse):
         versions = [i.to_dict() for i in res.values()]
         return 200, {"status": 200}, json.dumps({"Versions": versions})
-    def subscription_definition_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def subscription_definition_version(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -574,7 +598,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_groups()
     def create_group(self) -> TYPE_RESPONSE:
-
         initial_version = self._get_param("InitialVersion")
         name = self._get_param("Name")
         res = self.greengrass_backend.create_group(
@@ -583,7 +606,6 @@ class GreengrassResponse(BaseResponse):
         return 201, {"status": 201}, json.dumps(res.to_dict())
     def list_groups(self) -> TYPE_RESPONSE:
-
         res = self.greengrass_backend.list_groups()
         return (
             200,
@@ -621,7 +643,9 @@
         self.greengrass_backend.update_group(group_id=group_id, name=name)
         return 200, {"status": 200}, json.dumps({})
-    def group_versions(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def group_versions(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -631,7 +655,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_group_versions()
     def create_group_version(self) -> TYPE_RESPONSE:
-
         group_id = self.path.split("/")[-2]
         core_definition_version_arn = self._get_param("CoreDefinitionVersionArn")
@@ -672,7 +695,6 @@ class GreengrassResponse(BaseResponse):
             return self.get_group_version()
     def get_group_version(self) -> TYPE_RESPONSE:
-
         group_id = self.path.split("/")[-3]
         group_version_id = self.path.split("/")[-1]
         res = self.greengrass_backend.get_group_version(
@@ -691,7 +713,6 @@ class GreengrassResponse(BaseResponse):
             return self.list_deployments()
     def create_deployment(self) -> TYPE_RESPONSE:
-
         group_id = self.path.split("/")[-2]
         group_version_id = self._get_param("GroupVersionId")
         deployment_type = self._get_param("DeploymentType")
@@ -721,7 +742,9 @@ class GreengrassResponse(BaseResponse):
             json.dumps({"Deployments": deployments}),
         )
-    def deployment_satus(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def deployment_satus(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "GET":
@@ -737,7 +760,9 @@ class GreengrassResponse(BaseResponse):
         )
         return 200, {"status": 200}, json.dumps(res.to_dict())
-    def deployments_reset(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE:  # type: ignore[return]
+    def deployments_reset(
+        self, request: Any, full_url: str, headers: Any
+    ) -> TYPE_RESPONSE:  # type: ignore[return]
         self.setup_class(request, full_url, headers)
         if self.method == "POST":
@@ -764,7 +789,6 @@ class GreengrassResponse(BaseResponse):
             return self.disassociate_role_from_group()
     def associate_role_to_group(self) -> TYPE_RESPONSE:
-
         group_id = self.path.split("/")[-2]
         role_arn = self._get_param("RoleArn")
         res = self.greengrass_backend.associate_role_to_group(
@@ -774,7 +798,6 @@ class GreengrassResponse(BaseResponse):
         return 200, {"status": 200}, json.dumps(res.to_dict())
     def get_associated_role(self) -> TYPE_RESPONSE:
-
         group_id = self.path.split("/")[-2]
         res = self.greengrass_backend.get_associated_role(
             group_id=group_id,
diff --git a/moto/guardduty/exceptions.py b/moto/guardduty/exceptions.py
index 51483a9e6..3c21d7082 100644
--- a/moto/guardduty/exceptions.py
+++ b/moto/guardduty/exceptions.py
@@ -16,9 +16,7 @@ class DetectorNotFoundException(GuardDutyException):
             "The request is rejected because the input detectorId is not owned by the current account.",
         )
-    def get_headers(
-        self, *args: Any, **kwargs: Any
-    ) -> List[Tuple[str, str]]:  # pylint: disable=unused-argument
+    def get_headers(self, *args: Any, **kwargs: Any) -> List[Tuple[str, str]]:  # pylint: disable=unused-argument
         return [("X-Amzn-ErrorType", "BadRequestException")]
@@ -31,7 +29,5 @@ class FilterNotFoundException(GuardDutyException):
             "The request is rejected since no such resource found.",
         )
-    def get_headers(
-        self, *args: Any, **kwargs: Any
-    ) -> List[Tuple[str, str]]:  # pylint:
disable=unused-argument return [("X-Amzn-ErrorType", "BadRequestException")] diff --git a/moto/iam/config.py b/moto/iam/config.py index 8d929bcb1..7443097c1 100644 --- a/moto/iam/config.py +++ b/moto/iam/config.py @@ -138,7 +138,6 @@ class RoleConfigQuery(ConfigQueryModel[IAMBackend]): backend_region: Optional[str] = None, resource_region: Optional[str] = None, ) -> Optional[Dict[str, Any]]: - role = self.backends[account_id]["global"].roles.get(resource_id) if not role: diff --git a/moto/iam/models.py b/moto/iam/models.py index b998da051..0cb6a61f1 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -140,7 +140,6 @@ class VirtualMfaDevice: class Policy(CloudFormationModel): - # Note: This class does not implement the CloudFormation support for AWS::IAM::Policy, as that CF resource # is for creating *inline* policies. That is done in class InlinePolicy. @@ -464,7 +463,9 @@ class AWSManagedPolicy(ManagedPolicy): """AWS-managed policy.""" @classmethod - def from_data(cls, name: str, account_id: str, data: Dict[str, Any]) -> "AWSManagedPolicy": # type: ignore[misc] + def from_data( + cls, name: str, account_id: str, data: Dict[str, Any] + ) -> "AWSManagedPolicy": # type: ignore[misc] return cls( name, account_id=account_id, diff --git a/moto/iam/policy_validation.py b/moto/iam/policy_validation.py index 02eea188f..95dd75772 100644 --- a/moto/iam/policy_validation.py +++ b/moto/iam/policy_validation.py @@ -222,7 +222,9 @@ class BaseIAMPolicyValidator: ) @staticmethod - def _validate_string_or_list_of_strings_syntax(statement: Dict[str, Any], key: str) -> None: # type: ignore[misc] + def _validate_string_or_list_of_strings_syntax( + statement: Dict[str, Any], key: str + ) -> None: # type: ignore[misc] if key in statement: assert isinstance(statement[key], (str, list)) if isinstance(statement[key], list): @@ -454,7 +456,9 @@ class BaseIAMPolicyValidator: assert resource[2] != "" @staticmethod - def _legacy_parse_condition(condition_key: str, condition_value: Dict[str, Any]) -> None: # type: ignore[misc] + def _legacy_parse_condition( + condition_key: str, condition_value: Dict[str, Any] + ) -> None: # type: ignore[misc] stripped_condition_key = IAMPolicyDocumentValidator._strip_condition_key( condition_key ) diff --git a/moto/identitystore/exceptions.py b/moto/identitystore/exceptions.py index 1f064ff62..5ac13e64b 100644 --- a/moto/identitystore/exceptions.py +++ b/moto/identitystore/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the identitystore service.""" + import json from typing import Any diff --git a/moto/identitystore/responses.py b/moto/identitystore/responses.py index 6c76e040b..dd3ed4afd 100644 --- a/moto/identitystore/responses.py +++ b/moto/identitystore/responses.py @@ -1,4 +1,5 @@ """Handles incoming identitystore requests, invokes methods, returns responses.""" + import json from typing import Any, Dict, NamedTuple, Optional @@ -161,7 +162,10 @@ class IdentityStoreResponse(BaseResponse): max_results = self._get_param("MaxResults") next_token = self._get_param("NextToken") filters = self._get_param("Filters") - (groups, next_token,) = self.identitystore_backend.list_groups( + ( + groups, + next_token, + ) = self.identitystore_backend.list_groups( identity_store_id=identity_store_id, max_results=max_results, next_token=next_token, @@ -198,7 +202,10 @@ class IdentityStoreResponse(BaseResponse): max_results = self._get_param("MaxResults") next_token = self._get_param("NextToken") filters = self._get_param("Filters") - (users, next_token,) = 
self.identitystore_backend.list_users( + ( + users, + next_token, + ) = self.identitystore_backend.list_users( identity_store_id=identity_store_id, max_results=max_results, next_token=next_token, diff --git a/moto/identitystore/urls.py b/moto/identitystore/urls.py index 11696e6dd..77f8b1804 100644 --- a/moto/identitystore/urls.py +++ b/moto/identitystore/urls.py @@ -1,4 +1,5 @@ """identitystore base URL and path.""" + from .responses import IdentityStoreResponse url_bases = [ diff --git a/moto/inspector2/urls.py b/moto/inspector2/urls.py index 678e8f3cc..a9c12da41 100644 --- a/moto/inspector2/urls.py +++ b/moto/inspector2/urls.py @@ -1,4 +1,5 @@ """inspector2 base URL and path.""" + from .responses import Inspector2Response url_bases = [ diff --git a/moto/iot/models.py b/moto/iot/models.py index 80d8498e4..7b0e2a48c 100644 --- a/moto/iot/models.py +++ b/moto/iot/models.py @@ -625,12 +625,12 @@ class IoTBackend(BaseBackend): self.ca_certificates: Dict[str, FakeCaCertificate] = OrderedDict() self.certificates: Dict[str, FakeCertificate] = OrderedDict() self.policies: Dict[str, FakePolicy] = OrderedDict() - self.principal_policies: Dict[ - Tuple[str, str], Tuple[str, FakePolicy] - ] = OrderedDict() - self.principal_things: Dict[ - Tuple[str, str], Tuple[str, FakeThing] - ] = OrderedDict() + self.principal_policies: Dict[Tuple[str, str], Tuple[str, FakePolicy]] = ( + OrderedDict() + ) + self.principal_things: Dict[Tuple[str, str], Tuple[str, FakeThing]] = ( + OrderedDict() + ) self.rules: Dict[str, FakeRule] = OrderedDict() self.endpoint: Optional[FakeEndpoint] = None self.domain_configurations: Dict[str, FakeDomainConfiguration] = OrderedDict() @@ -1459,8 +1459,11 @@ class IoTBackend(BaseBackend): attributes = attribute_payload["attributes"] if attributes: # might not exist yet, for example when the thing group was created without attributes - current_attribute_payload = thing_group.thing_group_properties.setdefault( - "attributePayload", {"attributes": {}} # type: ignore + current_attribute_payload = ( + thing_group.thing_group_properties.setdefault( + "attributePayload", + {"attributes": {}}, # type: ignore + ) ) if not do_merge: current_attribute_payload["attributes"] = attributes # type: ignore @@ -1469,9 +1472,9 @@ class IoTBackend(BaseBackend): elif attribute_payload is not None and "attributes" not in attribute_payload: thing_group.attributes = {} # type: ignore if "thingGroupDescription" in thing_group_properties: - thing_group.thing_group_properties[ - "thingGroupDescription" - ] = thing_group_properties["thingGroupDescription"] + thing_group.thing_group_properties["thingGroupDescription"] = ( + thing_group_properties["thingGroupDescription"] + ) thing_group.version = thing_group.version + 1 return thing_group.version diff --git a/moto/iotdata/models.py b/moto/iotdata/models.py index 481da8f65..4a17ca81a 100644 --- a/moto/iotdata/models.py +++ b/moto/iotdata/models.py @@ -44,7 +44,9 @@ class FakeShadow(BaseModel): ) @classmethod - def create_from_previous_version(cls, previous_shadow: Optional["FakeShadow"], payload: Optional[Dict[str, Any]]) -> "FakeShadow": # type: ignore[misc] + def create_from_previous_version( + cls, previous_shadow: Optional["FakeShadow"], payload: Optional[Dict[str, Any]] + ) -> "FakeShadow": # type: ignore[misc] """ set None to payload when you want to delete shadow """ diff --git a/moto/ivs/exceptions.py b/moto/ivs/exceptions.py index 73771dd07..821066877 100644 --- a/moto/ivs/exceptions.py +++ b/moto/ivs/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by 
the ivs service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/ivs/models.py b/moto/ivs/models.py index 344d1f12d..dbea7b278 100644 --- a/moto/ivs/models.py +++ b/moto/ivs/models.py @@ -1,4 +1,5 @@ """IVSBackend class with methods for supported APIs.""" + from typing import Any, Dict, List, Optional, Tuple from moto.core.base_backend import BackendDict, BaseBackend diff --git a/moto/ivs/responses.py b/moto/ivs/responses.py index d3552b6b6..612f56c91 100644 --- a/moto/ivs/responses.py +++ b/moto/ivs/responses.py @@ -1,4 +1,5 @@ """Handles incoming ivs requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/ivs/urls.py b/moto/ivs/urls.py index 89e0a2acd..471d0e355 100644 --- a/moto/ivs/urls.py +++ b/moto/ivs/urls.py @@ -1,4 +1,5 @@ """ivs base URL and path.""" + from .responses import IVSResponse url_bases = [ diff --git a/moto/kinesis/models.py b/moto/kinesis/models.py index bab8acda0..cb61030d3 100644 --- a/moto/kinesis/models.py +++ b/moto/kinesis/models.py @@ -366,7 +366,7 @@ class Stream(CloudFormationModel): [s for s in shard_list[0:-1:2]], [s for s in shard_list[1::2]] ) - for (shard, adjacent) in adjacent_shards: + for shard, adjacent in adjacent_shards: self.merge_shards(shard.shard_id, adjacent.shard_id) required_shard_merges -= 1 if required_shard_merges == 0: @@ -683,7 +683,9 @@ class KinesisBackend(BaseBackend): ) next_shard_iterator = compose_shard_iterator( - stream_name, shard, last_sequence_id # type: ignore + stream_name, + shard, + last_sequence_id, # type: ignore ) return next_shard_iterator, records, millis_behind_latest @@ -732,7 +734,9 @@ class KinesisBackend(BaseBackend): data = record.get("Data") sequence_number, shard_id = stream.put_record( - partition_key, explicit_hash_key, data # type: ignore[arg-type] + partition_key, + explicit_hash_key, + data, # type: ignore[arg-type] ) response["Records"].append( {"SequenceNumber": sequence_number, "ShardId": shard_id} diff --git a/moto/kms/policy_validator.py b/moto/kms/policy_validator.py index 1d1a78148..c1df13f3f 100644 --- a/moto/kms/policy_validator.py +++ b/moto/kms/policy_validator.py @@ -44,7 +44,8 @@ def action_matches(applicable_actions: List[str], action: str) -> bool: def resource_matches( - applicable_resources: str, resource: str # pylint: disable=unused-argument + applicable_resources: str, + resource: str, # pylint: disable=unused-argument ) -> bool: if applicable_resources == "*": return True diff --git a/moto/lakeformation/exceptions.py b/moto/lakeformation/exceptions.py index 86727c390..9041402ca 100644 --- a/moto/lakeformation/exceptions.py +++ b/moto/lakeformation/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the lakeformation service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/lakeformation/responses.py b/moto/lakeformation/responses.py index b3e1af6bd..fe8c05fef 100644 --- a/moto/lakeformation/responses.py +++ b/moto/lakeformation/responses.py @@ -1,4 +1,5 @@ """Handles incoming lakeformation requests, invokes methods, returns responses.""" + import json from typing import Any, Dict diff --git a/moto/lakeformation/urls.py b/moto/lakeformation/urls.py index fa6458e10..1f4a4a1a1 100644 --- a/moto/lakeformation/urls.py +++ b/moto/lakeformation/urls.py @@ -1,4 +1,5 @@ """lakeformation base URL and path.""" + from .responses import LakeFormationResponse url_bases = [ diff --git a/moto/logs/models.py b/moto/logs/models.py index 3f244b060..b2057d18a 100644 --- 
a/moto/logs/models.py +++ b/moto/logs/models.py @@ -183,7 +183,6 @@ class LogStream(BaseModel): self.upload_sequence_token += 1 for subscription_filter in self.log_group.subscription_filters.values(): - service = subscription_filter.destination_arn.split(":")[2] formatted_log_events = [ { @@ -208,7 +207,6 @@ class LogStream(BaseModel): filter_name: str, log_events: List[Dict[str, Any]], ) -> None: - if service == "lambda": from moto.awslambda.utils import get_backend @@ -1210,7 +1208,6 @@ class LogsBackend(BaseBackend): end_time: int, query_string: str, ) -> str: - for log_group_name in log_group_names: if log_group_name not in self.groups: raise ResourceNotFoundException() diff --git a/moto/managedblockchain/exceptions.py b/moto/managedblockchain/exceptions.py index 5f7199929..f1b5ea369 100644 --- a/moto/managedblockchain/exceptions.py +++ b/moto/managedblockchain/exceptions.py @@ -11,17 +11,13 @@ class ManagedBlockchainClientError(JsonRESTError): self.message = message self.description = json.dumps({"message": self.message}) - def get_headers( - self, *args: Any, **kwargs: Any - ) -> List[Tuple[str, str]]: # pylint: disable=unused-argument + def get_headers(self, *args: Any, **kwargs: Any) -> List[Tuple[str, str]]: # pylint: disable=unused-argument return [ ("Content-Type", "application/json"), ("x-amzn-ErrorType", self.error_type), ] - def get_body( - self, *args: Any, **kwargs: Any - ) -> str: # pylint: disable=unused-argument + def get_body(self, *args: Any, **kwargs: Any) -> str: # pylint: disable=unused-argument return self.description diff --git a/moto/managedblockchain/models.py b/moto/managedblockchain/models.py index 79dc02cde..e3a05ca6e 100644 --- a/moto/managedblockchain/models.py +++ b/moto/managedblockchain/models.py @@ -415,9 +415,9 @@ class ManagedBlockchainMember(BaseModel): self.status = "DELETED" def update(self, logpublishingconfiguration: Dict[str, Any]) -> None: - self.member_configuration[ - "LogPublishingConfiguration" - ] = logpublishingconfiguration + self.member_configuration["LogPublishingConfiguration"] = ( + logpublishingconfiguration + ) class ManagedBlockchainNode(BaseModel): diff --git a/moto/mediaconnect/models.py b/moto/mediaconnect/models.py index 18b7d3ca3..59b7ecf2b 100644 --- a/moto/mediaconnect/models.py +++ b/moto/mediaconnect/models.py @@ -19,9 +19,9 @@ class Flow(BaseModel): self.source_failover_config = kwargs.get("source_failover_config", {}) self.sources = kwargs.get("sources", []) self.vpc_interfaces = kwargs.get("vpc_interfaces", []) - self.status: Optional[ - str - ] = "STANDBY" # one of 'STANDBY'|'ACTIVE'|'UPDATING'|'DELETING'|'STARTING'|'STOPPING'|'ERROR' + self.status: Optional[str] = ( + "STANDBY" # one of 'STANDBY'|'ACTIVE'|'UPDATING'|'DELETING'|'STARTING'|'STOPPING'|'ERROR' + ) self._previous_status: Optional[str] = None self.description = "A Moto test flow" self.flow_arn = f"arn:aws:mediaconnect:{region_name}:{account_id}:flow:{self.id}:{self.name}" @@ -262,9 +262,9 @@ class MediaConnectBackend(BaseBackend): output["destination"] = destination output["encryption"] = encryption output["maxLatency"] = max_latency - output[ - "mediaStreamOutputConfiguration" - ] = media_stream_output_configuration + output["mediaStreamOutputConfiguration"] = ( + media_stream_output_configuration + ) output["minLatency"] = min_latency output["port"] = port output["protocol"] = protocol @@ -328,9 +328,9 @@ class MediaConnectBackend(BaseBackend): source["maxBitrate"] = max_bitrate source["maxLatency"] = max_latency source["maxSyncBuffer"] = max_sync_buffer 
- source[ - "mediaStreamSourceConfigurations" - ] = media_stream_source_configurations + source["mediaStreamSourceConfigurations"] = ( + media_stream_source_configurations + ) source["minLatency"] = min_latency source["protocol"] = protocol source["senderControlPort"] = sender_control_port diff --git a/moto/moto_api/_internal/recorder/responses.py b/moto/moto_api/_internal/recorder/responses.py index 5937b5997..6223af433 100644 --- a/moto/moto_api/_internal/recorder/responses.py +++ b/moto/moto_api/_internal/recorder/responses.py @@ -8,32 +8,47 @@ from ... import recorder class RecorderResponse(BaseResponse): def reset_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: recorder.reset_recording() return 200, {}, "" def start_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: recorder.start_recording() return 200, {}, "Recording is set to True" def stop_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: recorder.stop_recording() return 200, {}, "Recording is set to False" def upload_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: data = req.data recorder.upload_recording(data) return 200, {}, "" def download_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: data = recorder.download_recording() return 200, {}, data @@ -41,7 +56,10 @@ class RecorderResponse(BaseResponse): # NOTE: Replaying assumes, for simplicity, that it is the only action # running against moto at the time. No recording happens while replaying. def replay_recording( - self, req: Any, url: str, headers: Any # pylint: disable=unused-argument + self, + req: Any, + url: str, + headers: Any, # pylint: disable=unused-argument ) -> TYPE_RESPONSE: recorder.replay_recording(target_host=url) return 200, {}, "" diff --git a/moto/mq/responses.py b/moto/mq/responses.py index 2365076fe..8bb670c32 100644 --- a/moto/mq/responses.py +++ b/moto/mq/responses.py @@ -1,4 +1,5 @@ """Handles incoming mq requests, invokes methods, returns responses.""" + import json from urllib.parse import unquote diff --git a/moto/mq/urls.py b/moto/mq/urls.py index 7fde551b7..6a63e4d9a 100644 --- a/moto/mq/urls.py +++ b/moto/mq/urls.py @@ -1,4 +1,5 @@ """mq base URL and path.""" + from .responses import MQResponse url_bases = [ diff --git a/moto/neptune/__init__.py b/moto/neptune/__init__.py index f3eb618b4..e7353bb83 100644 --- a/moto/neptune/__init__.py +++ b/moto/neptune/__init__.py @@ -5,4 +5,5 @@ It shares almost everything with RDS: the endpoint URL, and the features. Only t Because the endpoint URL is the same (rds.amazonaws.com), every request is intercepted by the RDS service. RDS then has to determine whether any incoming call was meant for RDS, or for neptune. 
""" + from .models import neptune_backends # noqa: F401 diff --git a/moto/neptune/urls.py b/moto/neptune/urls.py index b8de05952..bcdbcb294 100644 --- a/moto/neptune/urls.py +++ b/moto/neptune/urls.py @@ -1,6 +1,7 @@ """ All calls to this service are intercepted by RDS """ + url_bases = [] # type: ignore[var-annotated] diff --git a/moto/opensearch/exceptions.py b/moto/opensearch/exceptions.py index b5b3f9fe9..55504a02d 100644 --- a/moto/opensearch/exceptions.py +++ b/moto/opensearch/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the opensearch service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/opensearch/responses.py b/moto/opensearch/responses.py index ed13c48e1..e8f102e14 100644 --- a/moto/opensearch/responses.py +++ b/moto/opensearch/responses.py @@ -1,4 +1,5 @@ """Handles incoming opensearch requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/opensearch/urls.py b/moto/opensearch/urls.py index 835c3e085..0d7d1a4cf 100644 --- a/moto/opensearch/urls.py +++ b/moto/opensearch/urls.py @@ -1,4 +1,5 @@ """opensearch base URL and path.""" + from .responses import OpenSearchServiceResponse url_bases = [r"https?://es\.(.+)\.amazonaws\.com"] diff --git a/moto/opsworks/models.py b/moto/opsworks/models.py index 497b91a58..cf153a175 100644 --- a/moto/opsworks/models.py +++ b/moto/opsworks/models.py @@ -40,7 +40,6 @@ class OpsworkInstance(BaseModel): associate_public_ip: Optional[str] = None, security_group_ids: Optional[List[str]] = None, ): - self.ec2_backend = ec2_backend self.instance_profile_arn = instance_profile_arn @@ -320,7 +319,6 @@ class Stack(BaseModel): default_root_device_type: str = "instance-store", agent_version: str = "LATEST", ): - self.name = name self.region = region self.service_role_arn = service_role_arn @@ -590,7 +588,9 @@ class OpsWorksBackend(BaseBackend): raise ResourceNotFoundException(", ".join(unknown_apps)) return [self.apps[id].to_dict() for id in app_ids] - def describe_instances(self, instance_ids: List[str], layer_id: str, stack_id: str) -> List[Dict[str, Any]]: # type: ignore[return] + def describe_instances( + self, instance_ids: List[str], layer_id: str, stack_id: str + ) -> List[Dict[str, Any]]: # type: ignore[return] if len(list(filter(None, (instance_ids, layer_id, stack_id)))) != 1: raise ValidationException( "Please provide either one or more " diff --git a/moto/packages/boto/ec2/ec2object.py b/moto/packages/boto/ec2/ec2object.py index d8a869701..7c11a6391 100644 --- a/moto/packages/boto/ec2/ec2object.py +++ b/moto/packages/boto/ec2/ec2object.py @@ -23,6 +23,7 @@ """ Represents an EC2 Object """ + from typing import Any from moto.packages.boto.ec2.tag import TagSet diff --git a/moto/packages/boto/ec2/instance.py b/moto/packages/boto/ec2/instance.py index f529fc2b5..2ab190770 100644 --- a/moto/packages/boto/ec2/instance.py +++ b/moto/packages/boto/ec2/instance.py @@ -24,6 +24,7 @@ """ Represents an EC2 Instance """ + from typing import Any from moto.packages.boto.ec2.ec2object import EC2Object, TaggedEC2Object diff --git a/moto/packages/cfnresponse/cfnresponse.py b/moto/packages/cfnresponse/cfnresponse.py index 18eb3944f..870fcdbdc 100644 --- a/moto/packages/cfnresponse/cfnresponse.py +++ b/moto/packages/cfnresponse/cfnresponse.py @@ -58,5 +58,4 @@ def send( print("Status code:", response.status) # noqa: T201 except Exception as e: - print("send(..) 
failed executing http.request(..):", e) # noqa: T201 diff --git a/moto/personalize/exceptions.py b/moto/personalize/exceptions.py index bcb41360d..bf6256485 100644 --- a/moto/personalize/exceptions.py +++ b/moto/personalize/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the personalize service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/personalize/responses.py b/moto/personalize/responses.py index b6aa2f13a..c80ade0ab 100644 --- a/moto/personalize/responses.py +++ b/moto/personalize/responses.py @@ -1,4 +1,5 @@ """Handles incoming personalize requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/personalize/urls.py b/moto/personalize/urls.py index 731bcdcab..580eb1b48 100644 --- a/moto/personalize/urls.py +++ b/moto/personalize/urls.py @@ -1,4 +1,5 @@ """personalize base URL and path.""" + from .responses import PersonalizeResponse url_bases = [ diff --git a/moto/pinpoint/exceptions.py b/moto/pinpoint/exceptions.py index 817fab862..abdc59791 100644 --- a/moto/pinpoint/exceptions.py +++ b/moto/pinpoint/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the pinpoint service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/pinpoint/responses.py b/moto/pinpoint/responses.py index c323fa7a5..3e82c291e 100644 --- a/moto/pinpoint/responses.py +++ b/moto/pinpoint/responses.py @@ -1,4 +1,5 @@ """Handles incoming pinpoint requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote @@ -85,7 +86,8 @@ class PinpointResponse(BaseResponse): resource_arn = unquote(self.path).split("/tags/")[-1] tag_keys = self.querystring.get("tagKeys") self.pinpoint_backend.untag_resource( - resource_arn=resource_arn, tag_keys=tag_keys # type: ignore[arg-type] + resource_arn=resource_arn, + tag_keys=tag_keys, # type: ignore[arg-type] ) return 200, {}, "{}" diff --git a/moto/pinpoint/urls.py b/moto/pinpoint/urls.py index 4963a3d20..a6909bf82 100644 --- a/moto/pinpoint/urls.py +++ b/moto/pinpoint/urls.py @@ -1,4 +1,5 @@ """pinpoint base URL and path.""" + from .responses import PinpointResponse url_bases = [ diff --git a/moto/quicksight/exceptions.py b/moto/quicksight/exceptions.py index c456fe015..b9b571e46 100644 --- a/moto/quicksight/exceptions.py +++ b/moto/quicksight/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the quicksight service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/quicksight/responses.py b/moto/quicksight/responses.py index 8678fce72..08fa515b7 100644 --- a/moto/quicksight/responses.py +++ b/moto/quicksight/responses.py @@ -1,4 +1,5 @@ """Handles incoming quicksight requests, invokes methods, returns responses.""" + import json from typing import Any diff --git a/moto/quicksight/urls.py b/moto/quicksight/urls.py index 35b0c8193..2fc22d041 100644 --- a/moto/quicksight/urls.py +++ b/moto/quicksight/urls.py @@ -1,4 +1,5 @@ """quicksight base URL and path.""" + from .responses import QuickSightResponse url_bases = [ diff --git a/moto/rds/models.py b/moto/rds/models.py index ace7eaf05..8125b544d 100644 --- a/moto/rds/models.py +++ b/moto/rds/models.py @@ -1902,9 +1902,9 @@ class RDSBackend(BaseBackend): database = self.describe_db_instances(db_instance_identifier)[0] if "new_db_instance_identifier" in db_kwargs: del self.databases[db_instance_identifier] - db_instance_identifier = db_kwargs[ - "db_instance_identifier" - ] = db_kwargs.pop("new_db_instance_identifier") + 
db_instance_identifier = db_kwargs["db_instance_identifier"] = ( + db_kwargs.pop("new_db_instance_identifier") + ) self.databases[db_instance_identifier] = database preferred_backup_window = db_kwargs.get("preferred_backup_window") preferred_maintenance_window = db_kwargs.get("preferred_maintenance_window") @@ -2615,9 +2615,9 @@ class RDSBackend(BaseBackend): raise DBClusterSnapshotNotFoundError(snapshot_id) if snapshot_type == "snapshot": - snapshot: Union[ - DatabaseSnapshot, ClusterSnapshot - ] = self.database_snapshots[snapshot_id] + snapshot: Union[DatabaseSnapshot, ClusterSnapshot] = ( + self.database_snapshots[snapshot_id] + ) else: snapshot = self.cluster_snapshots[snapshot_id] @@ -2765,7 +2765,9 @@ class RDSBackend(BaseBackend): "InvalidParameterValue", f"Invalid resource name: {arn}" ) - def add_tags_to_resource(self, arn: str, tags: List[Dict[str, str]]) -> List[Dict[str, str]]: # type: ignore[return] + def add_tags_to_resource( + self, arn: str, tags: List[Dict[str, str]] + ) -> List[Dict[str, str]]: # type: ignore[return] if self.arn_regex.match(arn): arn_breakdown = arn.split(":") resource_type = arn_breakdown[-2] @@ -2822,7 +2824,9 @@ class RDSBackend(BaseBackend): raise InvalidParameterCombination(str(e)) @staticmethod - def _merge_tags(old_tags: List[Dict[str, Any]], new_tags: List[Dict[str, Any]]) -> List[Dict[str, Any]]: # type: ignore[misc] + def _merge_tags( + old_tags: List[Dict[str, Any]], new_tags: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: # type: ignore[misc] tags_dict = dict() tags_dict.update({d["Key"]: d["Value"] for d in old_tags}) tags_dict.update({d["Key"]: d["Value"] for d in new_tags}) @@ -3146,14 +3150,16 @@ class OptionGroup: return template.render(option_group=self) def remove_options( - self, options_to_remove: Any # pylint: disable=unused-argument + self, + options_to_remove: Any, # pylint: disable=unused-argument ) -> None: # TODO: Check for option in self.options and remove if exists. Raise # error otherwise return def add_options( - self, options_to_add: Any # pylint: disable=unused-argument + self, + options_to_add: Any, # pylint: disable=unused-argument ) -> None: # TODO: Validate option and add it to self.options. 
If invalid raise # error diff --git a/moto/rds/utils.py b/moto/rds/utils.py index 546274afe..41dabc435 100644 --- a/moto/rds/utils.py +++ b/moto/rds/utils.py @@ -22,7 +22,6 @@ FilterDef = namedtuple( class DbInstanceEngine(str, Enum): - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds/client/create_db_instance.html # 2023-11-08 AURORA_MYSQL = "aurora-mysql" diff --git a/moto/rdsdata/urls.py b/moto/rdsdata/urls.py index 2e9f60128..9682366e7 100644 --- a/moto/rdsdata/urls.py +++ b/moto/rdsdata/urls.py @@ -1,4 +1,5 @@ """rdsdata base URL and path.""" + from .responses import RDSDataServiceResponse url_bases = [ diff --git a/moto/redshift/exceptions.py b/moto/redshift/exceptions.py index 7d50599a1..b087457bb 100644 --- a/moto/redshift/exceptions.py +++ b/moto/redshift/exceptions.py @@ -87,7 +87,6 @@ class InvalidParameterValueError(RedshiftClientError): class ResourceNotFoundFaultError(RedshiftClientError): - code = 404 def __init__( diff --git a/moto/redshift/models.py b/moto/redshift/models.py index c4aa62f60..5f66e77cc 100644 --- a/moto/redshift/models.py +++ b/moto/redshift/models.py @@ -329,9 +329,9 @@ class Cluster(TaggableResourceMixin, CloudFormationModel): "EstimatedTimeToCompletionInSeconds": 123, } if self.cluster_snapshot_copy_status is not None: - json_response[ - "ClusterSnapshotCopyStatus" - ] = self.cluster_snapshot_copy_status + json_response["ClusterSnapshotCopyStatus"] = ( + self.cluster_snapshot_copy_status + ) return json_response diff --git a/moto/redshift/responses.py b/moto/redshift/responses.py index 89a3ed3bd..bba31b5d0 100644 --- a/moto/redshift/responses.py +++ b/moto/redshift/responses.py @@ -269,7 +269,7 @@ class RedshiftResponse(BaseResponse): cluster_kwargs = {} # We only want parameters that were actually passed in, otherwise # we'll stomp all over our cluster metadata with None values. 
- for (key, value) in request_kwargs.items(): + for key, value in request_kwargs.items(): if value is not None and value != []: cluster_kwargs[key] = value diff --git a/moto/rekognition/models.py b/moto/rekognition/models.py index d526738f4..d7f05d7ba 100644 --- a/moto/rekognition/models.py +++ b/moto/rekognition/models.py @@ -48,7 +48,13 @@ class RekognitionBackend(BaseBackend): def compare_faces( self, - ) -> Tuple[List[Dict[str, Any]], str, str, List[Dict[str, Any]], Dict[str, Any],]: + ) -> Tuple[ + List[Dict[str, Any]], + str, + str, + List[Dict[str, Any]], + Dict[str, Any], + ]: return ( self._face_matches(), "ROTATE_90", diff --git a/moto/rekognition/urls.py b/moto/rekognition/urls.py index ce2369472..2f7c98992 100644 --- a/moto/rekognition/urls.py +++ b/moto/rekognition/urls.py @@ -1,4 +1,5 @@ """rekognition base URL and path.""" + from .responses import RekognitionResponse url_bases = [ diff --git a/moto/resiliencehub/exceptions.py b/moto/resiliencehub/exceptions.py index 8eac01a43..a740d51b4 100644 --- a/moto/resiliencehub/exceptions.py +++ b/moto/resiliencehub/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the resiliencehub service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/resiliencehub/urls.py b/moto/resiliencehub/urls.py index 09e96ef08..6ee138103 100644 --- a/moto/resiliencehub/urls.py +++ b/moto/resiliencehub/urls.py @@ -1,4 +1,5 @@ """resiliencehub base URL and path.""" + from .responses import ResilienceHubResponse url_bases = [ diff --git a/moto/resourcegroupstaggingapi/models.py b/moto/resourcegroupstaggingapi/models.py index fcdb8e489..0e3495404 100644 --- a/moto/resourcegroupstaggingapi/models.py +++ b/moto/resourcegroupstaggingapi/models.py @@ -104,7 +104,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): @property def sns_backend(self) -> SNSBackend: return sns_backends[self.account_id][self.region_name] - + @property def ssm_backend(self) -> SimpleSystemManagerBackend: return ssm_backends[self.account_id][self.region_name] @@ -522,15 +522,14 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend): continue yield {"ResourceARN": f"{topic.arn}", "Tags": tags} - - # SSM + # SSM if not resource_type_filters or "ssm" in resource_type_filters: for document in self.ssm_backend._documents.values(): doc_name = document.describe()["Name"] tags = self.ssm_backend._get_documents_tags(doc_name) if not tags or not tag_filter( tags - ): # Skip if no tags, or invalid filter + ): # Skip if no tags, or invalid filter continue yield { "ResourceARN": f"arn:aws:ssm:{self.region_name}:{self.account_id}:document/{doc_name}", diff --git a/moto/route53/exceptions.py b/moto/route53/exceptions.py index 9d236459a..eb966b70f 100644 --- a/moto/route53/exceptions.py +++ b/moto/route53/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the Route53 service.""" + from typing import Any, Optional from moto.core.exceptions import RESTError @@ -161,7 +162,6 @@ class QueryLoggingConfigAlreadyExists(Route53ClientError): class InvalidChangeBatch(Route53ClientError): - code = 400 def __init__(self) -> None: diff --git a/moto/route53/models.py b/moto/route53/models.py index 063081dee..cdea39895 100644 --- a/moto/route53/models.py +++ b/moto/route53/models.py @@ -1,4 +1,5 @@ """Route53Backend class with methods for supported APIs.""" + import copy import itertools import re @@ -284,7 +285,9 @@ class RecordSet(CloudFormationModel): return self.name def delete( - self, account_id: str, region: str # pylint: disable=unused-argument + self, + account_id: str, + region: 
str, # pylint: disable=unused-argument ) -> None: """Not exposed as part of the Route 53 API - used for CloudFormation""" backend = route53_backends[account_id]["global"] @@ -498,7 +501,6 @@ class RecordSetGroup(CloudFormationModel): class QueryLoggingConfig(BaseModel): - """QueryLoggingConfig class; this object isn't part of Cloudformation.""" def __init__( diff --git a/moto/route53/responses.py b/moto/route53/responses.py index f8fa41919..084c003f2 100644 --- a/moto/route53/responses.py +++ b/moto/route53/responses.py @@ -1,4 +1,5 @@ """Handles Route53 API requests, invokes method and returns response.""" + import re from typing import Any from urllib.parse import parse_qs @@ -35,7 +36,9 @@ class Route53(BaseResponse): def backend(self) -> Route53Backend: return route53_backends[self.current_account]["global"] - def list_or_create_hostzone_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def list_or_create_hostzone_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) # Set these here outside the scope of the try/except @@ -133,7 +136,9 @@ class Route53(BaseResponse): template = Template(GET_HOSTED_ZONE_COUNT_RESPONSE) return 200, headers, template.render(zone_count=num_zones, xmlns=XMLNS) - def get_or_delete_hostzone_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def get_or_delete_hostzone_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) zoneid = self.parsed_url.path.rstrip("/").rsplit("/", 1)[1] @@ -153,7 +158,9 @@ class Route53(BaseResponse): template = Template(UPDATE_HOSTED_ZONE_COMMENT_RESPONSE) return 200, headers, template.render(zone=zone) - def get_dnssec_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def get_dnssec_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] # returns static response # TODO: implement enable/disable dnssec apis self.setup_class(request, full_url, headers) @@ -205,7 +212,9 @@ class Route53(BaseResponse): template = Template(DISASSOCIATE_VPC_RESPONSE) return 200, headers, template.render(comment=comment) - def rrset_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def rrset_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) method = request.method @@ -276,7 +285,9 @@ class Route53(BaseResponse): ) return 200, headers, r_template - def health_check_response1(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def health_check_response1( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) method = request.method @@ -316,7 +327,9 @@ class Route53(BaseResponse): template.render(health_checks=health_checks, xmlns=XMLNS), ) - def health_check_response2(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def health_check_response2( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) method = request.method @@ -352,7 +365,9 @@ class Route53(BaseResponse): 
template = Template(UPDATE_HEALTH_CHECK_RESPONSE) return 200, headers, template.render(health_check=health_check) - def health_check_status_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def health_check_status_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) method = request.method @@ -387,7 +402,9 @@ class Route53(BaseResponse): f"The action for {action} has not been implemented for route 53" ) - def list_or_change_tags_for_resource_request(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def list_or_change_tags_for_resource_request( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) id_ = self.parsed_url.path.split("/")[-1] @@ -422,7 +439,9 @@ class Route53(BaseResponse): template = Template(GET_CHANGE_RESPONSE) return 200, headers, template.render(change_id=change_id, xmlns=XMLNS) - def list_or_create_query_logging_config_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def list_or_create_query_logging_config_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "POST": @@ -449,7 +468,10 @@ class Route53(BaseResponse): # The paginator picks up named arguments, returns tuple. # pylint: disable=unbalanced-tuple-unpacking - (all_configs, next_token,) = self.backend.list_query_logging_configs( + ( + all_configs, + next_token, + ) = self.backend.list_query_logging_configs( hosted_zone_id=hosted_zone_id, next_token=next_token, max_results=max_results, @@ -466,7 +488,9 @@ class Route53(BaseResponse): ), ) - def get_or_delete_query_logging_config_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def get_or_delete_query_logging_config_response( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) query_logging_config_id = self.parsed_url.path.rstrip("/").rsplit("/", 1)[1] @@ -485,7 +509,9 @@ class Route53(BaseResponse): self.backend.delete_query_logging_config(query_logging_config_id) return 200, headers, "" - def reusable_delegation_sets(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def reusable_delegation_sets( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "GET": delegation_sets = self.backend.list_reusable_delegation_sets() @@ -515,7 +541,9 @@ class Route53(BaseResponse): template.render(delegation_set=delegation_set), ) - def reusable_delegation_set(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def reusable_delegation_set( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) ds_id = self.parsed_url.path.rstrip("/").rsplit("/")[-1] if request.method == "GET": diff --git a/moto/route53/urls.py b/moto/route53/urls.py index 156fc127d..7f18fd3cc 100644 --- a/moto/route53/urls.py +++ b/moto/route53/urls.py @@ -1,4 +1,5 @@ """Route53 base URL and path.""" + from typing import Any from moto.core.common_types import 
TYPE_RESPONSE diff --git a/moto/route53/utils.py b/moto/route53/utils.py index 31060101a..abbe46850 100644 --- a/moto/route53/utils.py +++ b/moto/route53/utils.py @@ -1,4 +1,5 @@ """Pagination control model for Route53.""" + from .exceptions import InvalidPaginationToken PAGINATION_MODEL = { diff --git a/moto/route53domains/models.py b/moto/route53domains/models.py index 3208b6f8d..9c33ccb09 100644 --- a/moto/route53domains/models.py +++ b/moto/route53domains/models.py @@ -80,7 +80,6 @@ class Route53DomainsBackend(BaseBackend): ) try: - domain = Route53Domain.validate( domain_name=domain_name, auto_renew=auto_renew, @@ -204,7 +203,6 @@ class Route53DomainsBackend(BaseBackend): sort_by: Optional[str] = None, sort_order: Optional[str] = None, ) -> List[Route53DomainsOperation]: - input_errors: List[str] = [] statuses = statuses or [] types = types or [] diff --git a/moto/route53domains/urls.py b/moto/route53domains/urls.py index 500444d22..650575068 100644 --- a/moto/route53domains/urls.py +++ b/moto/route53domains/urls.py @@ -1,4 +1,5 @@ """route53domains base URL and path.""" + from .responses import Route53DomainsResponse url_bases = [ diff --git a/moto/route53domains/validators.py b/moto/route53domains/validators.py index b5bfb0eb2..9d2f32c19 100644 --- a/moto/route53domains/validators.py +++ b/moto/route53domains/validators.py @@ -665,7 +665,6 @@ class Route53DomainsOperation(BaseModel): message: Optional[str] = None, status_flag: Optional[str] = None, ): - id_ = str(mock_random.uuid4()) submitted_date = datetime.now(timezone.utc) last_updated_date = datetime.now(timezone.utc) diff --git a/moto/route53resolver/exceptions.py b/moto/route53resolver/exceptions.py index 7149985ce..77016cfc7 100644 --- a/moto/route53resolver/exceptions.py +++ b/moto/route53resolver/exceptions.py @@ -29,7 +29,6 @@ class RRValidationException(JsonRESTError): class InvalidNextTokenException(JsonRESTError): - code = 400 def __init__(self) -> None: @@ -40,7 +39,6 @@ class InvalidNextTokenException(JsonRESTError): class InvalidParameterException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -48,7 +46,6 @@ class InvalidParameterException(JsonRESTError): class InvalidRequestException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -56,7 +53,6 @@ class InvalidRequestException(JsonRESTError): class LimitExceededException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -64,7 +60,6 @@ class LimitExceededException(JsonRESTError): class ResourceExistsException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -72,7 +67,6 @@ class ResourceExistsException(JsonRESTError): class ResourceInUseException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -80,7 +74,6 @@ class ResourceInUseException(JsonRESTError): class ResourceNotFoundException(JsonRESTError): - code = 400 def __init__(self, message: str): @@ -88,7 +81,6 @@ class ResourceNotFoundException(JsonRESTError): class TagValidationException(JsonRESTError): - code = 400 def __init__(self, message: str): diff --git a/moto/route53resolver/models.py b/moto/route53resolver/models.py index 572b1b73b..3f02147a9 100644 --- a/moto/route53resolver/models.py +++ b/moto/route53resolver/models.py @@ -1,4 +1,5 @@ """Route53ResolverBackend class with methods for supported APIs.""" + import re from collections import defaultdict from datetime import datetime, timezone @@ -233,9 +234,9 @@ class ResolverEndpoint(BaseModel): # pylint: disable=too-many-instance-attribut """ subnets: Dict[str, Any] = 
defaultdict(dict) for entry in self.ip_addresses: - subnets[entry["SubnetId"]][ - entry["Ip"] - ] = f"rni-{mock_random.get_random_hex(17)}" + subnets[entry["SubnetId"]][entry["Ip"]] = ( + f"rni-{mock_random.get_random_hex(17)}" + ) return subnets def create_eni(self) -> List[str]: diff --git a/moto/route53resolver/responses.py b/moto/route53resolver/responses.py index 52655780a..7901ee364 100644 --- a/moto/route53resolver/responses.py +++ b/moto/route53resolver/responses.py @@ -1,4 +1,5 @@ """Handles incoming route53resolver requests/responses.""" + import json from moto.core.exceptions import InvalidToken diff --git a/moto/route53resolver/urls.py b/moto/route53resolver/urls.py index 61a8ee576..6b15815a7 100644 --- a/moto/route53resolver/urls.py +++ b/moto/route53resolver/urls.py @@ -1,4 +1,5 @@ """route53resolver base URL and path.""" + from .responses import Route53ResolverResponse url_bases = [ diff --git a/moto/route53resolver/validations.py b/moto/route53resolver/validations.py index e17851738..894e7105f 100644 --- a/moto/route53resolver/validations.py +++ b/moto/route53resolver/validations.py @@ -2,6 +2,7 @@ Note that ValidationExceptions are accumulative. """ + import re from typing import Any, Dict, List, Optional, Tuple @@ -36,7 +37,7 @@ def validate_args(validators: List[Tuple[str, Any]]) -> None: err_msgs = [] # This eventually could be a switch (python 3.10), eliminating the need # for the above map and individual functions. - for (fieldname, value) in validators: + for fieldname, value in validators: msg = validation_map[fieldname](value) # type: ignore if msg: err_msgs.append((fieldname, value, msg)) diff --git a/moto/s3/cloud_formation.py b/moto/s3/cloud_formation.py index 6331e3721..e295623e4 100644 --- a/moto/s3/cloud_formation.py +++ b/moto/s3/cloud_formation.py @@ -3,9 +3,8 @@ from typing import Any, Dict, List def cfn_to_api_encryption( - bucket_encryption_properties: Dict[str, Any] + bucket_encryption_properties: Dict[str, Any], ) -> Dict[str, Any]: - sse_algorithm = bucket_encryption_properties["ServerSideEncryptionConfiguration"][ 0 ]["ServerSideEncryptionByDefault"]["SSEAlgorithm"] diff --git a/moto/s3/models.py b/moto/s3/models.py index 7f3722a6d..f133648ba 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -286,9 +286,9 @@ class FakeKey(BaseModel, ManagedState): if self.encryption == "aws:kms" and self.kms_key_id is not None: res["x-amz-server-side-encryption-aws-kms-key-id"] = self.kms_key_id if self.encryption == "aws:kms" and self.bucket_key_enabled is not None: - res[ - "x-amz-server-side-encryption-bucket-key-enabled" - ] = self.bucket_key_enabled + res["x-amz-server-side-encryption-bucket-key-enabled"] = ( + self.bucket_key_enabled + ) if self._storage_class != "STANDARD": res["x-amz-storage-class"] = self._storage_class if self._expiry is not None: @@ -374,11 +374,13 @@ class FakeKey(BaseModel, ManagedState): now = utcnow() try: until = datetime.datetime.strptime( - self.lock_until, "%Y-%m-%dT%H:%M:%SZ" # type: ignore + self.lock_until, + "%Y-%m-%dT%H:%M:%SZ", # type: ignore ) except ValueError: until = datetime.datetime.strptime( - self.lock_until, "%Y-%m-%dT%H:%M:%S.%fZ" # type: ignore + self.lock_until, + "%Y-%m-%dT%H:%M:%S.%fZ", # type: ignore ) if until > now: @@ -476,7 +478,11 @@ class FakeMultipart(BaseModel): raise NoSuchUpload(upload_id=part_id) key = FakeKey( - part_id, value, account_id=self.account_id, encryption=self.sse_encryption, kms_key_id=self.kms_key_id # type: ignore + part_id, + value, + account_id=self.account_id, + 
encryption=self.sse_encryption, + kms_key_id=self.kms_key_id, # type: ignore ) if part_id in self.parts: # We're overwriting the current part - dispose of it first @@ -1580,9 +1586,9 @@ class FakeBucket(CloudFormationModel): ) if self.notification_configuration: - s_config[ - "BucketNotificationConfiguration" - ] = self.notification_configuration.to_config_dict() + s_config["BucketNotificationConfiguration"] = ( + self.notification_configuration.to_config_dict() + ) else: s_config["BucketNotificationConfiguration"] = {"configurations": {}} @@ -1938,7 +1944,6 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider): and len(requested_versions) + len(delete_markers) + len(common_prefixes) >= max_keys ): - next_key_marker = name if is_common_prefix: # No NextToken when returning common prefixes diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 7e72a1c15..0b17c3915 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -469,17 +469,17 @@ class S3Response(BaseResponse): if cors_matches_origin(origin, cors_rule.allowed_origins): response_headers["Access-Control-Allow-Origin"] = origin if cors_rule.allowed_methods is not None: - response_headers[ - "Access-Control-Allow-Methods" - ] = _to_string(cors_rule.allowed_methods) + response_headers["Access-Control-Allow-Methods"] = ( + _to_string(cors_rule.allowed_methods) + ) if cors_rule.allowed_headers is not None: - response_headers[ - "Access-Control-Allow-Headers" - ] = _to_string(cors_rule.allowed_headers) + response_headers["Access-Control-Allow-Headers"] = ( + _to_string(cors_rule.allowed_headers) + ) if cors_rule.exposed_headers is not None: - response_headers[ - "Access-Control-Expose-Headers" - ] = _to_string(cors_rule.exposed_headers) + response_headers["Access-Control-Expose-Headers"] = ( + _to_string(cors_rule.exposed_headers) + ) if cors_rule.max_age_seconds is not None: response_headers["Access-Control-Max-Age"] = _to_string( cors_rule.max_age_seconds @@ -1388,7 +1388,9 @@ class S3Response(BaseResponse): ) next_part_number_marker = parts[-1].name if parts else 0 is_truncated = len(parts) != 0 and self.backend.is_truncated( - bucket_name, upload_id, next_part_number_marker # type: ignore + bucket_name, + upload_id, + next_part_number_marker, # type: ignore ) template = self.response_template(S3_MULTIPART_LIST_RESPONSE) @@ -1653,10 +1655,12 @@ class S3Response(BaseResponse): if key_to_copy is not None: if key_to_copy.storage_class in ARCHIVE_STORAGE_CLASSES: - if key_to_copy.response_dict.get( - "x-amz-restore" - ) is None or 'ongoing-request="true"' in key_to_copy.response_dict.get( # type: ignore - "x-amz-restore" + if ( + key_to_copy.response_dict.get("x-amz-restore") is None + or 'ongoing-request="true"' + in key_to_copy.response_dict.get( # type: ignore + "x-amz-restore" + ) ): raise ObjectNotInActiveTierError(key_to_copy) @@ -1694,9 +1698,9 @@ class S3Response(BaseResponse): tagging = self._tagging_from_headers(request.headers) self.backend.put_object_tagging(new_key, tagging) if key_to_copy.version_id != "null": - response_headers[ - "x-amz-copy-source-version-id" - ] = key_to_copy.version_id + response_headers["x-amz-copy-source-version-id"] = ( + key_to_copy.version_id + ) # checksum stuff, do we need to compute hash of the copied object checksum_algorithm = request.headers.get("x-amz-checksum-algorithm") @@ -2076,9 +2080,9 @@ class S3Response(BaseResponse): permissions, ) - parsed_xml["BucketLoggingStatus"]["LoggingEnabled"][ - "TargetGrants" - ] = target_grants + 
parsed_xml["BucketLoggingStatus"]["LoggingEnabled"]["TargetGrants"] = ( + target_grants + ) return parsed_xml["BucketLoggingStatus"]["LoggingEnabled"] @@ -2232,9 +2236,9 @@ class S3Response(BaseResponse): if encryption: response_headers["x-amz-server-side-encryption"] = encryption if kms_key_id: - response_headers[ - "x-amz-server-side-encryption-aws-kms-key-id" - ] = kms_key_id + response_headers["x-amz-server-side-encryption-aws-kms-key-id"] = ( + kms_key_id + ) template = self.response_template(S3_MULTIPART_INITIATE_RESPONSE) response = template.render( diff --git a/moto/s3/utils.py b/moto/s3/utils.py index 5946dbc8f..40fc5c533 100644 --- a/moto/s3/utils.py +++ b/moto/s3/utils.py @@ -115,7 +115,6 @@ def metadata_from_headers(headers: Dict[str, Any]) -> CaseInsensitiveDict: # ty class _VersionedKeyStore(dict): # type: ignore - """A simplified/modified version of Django's `MultiValueDict` taken from: https://github.com/django/django/blob/70576740b0bb5289873f5a9a9a4e1a26b2c330e5/django/utils/datastructures.py#L282 """ diff --git a/moto/s3control/config.py b/moto/s3control/config.py index 203b5e845..63487436c 100644 --- a/moto/s3control/config.py +++ b/moto/s3control/config.py @@ -102,7 +102,6 @@ class S3AccountPublicAccessBlockConfigQuery(ConfigQueryModel[S3ControlBackend]): backend_region: Optional[str] = None, resource_region: Optional[str] = None, ) -> Optional[Dict[str, Any]]: - # Do we even have this defined? backend = self.backends[account_id]["global"] if not backend.public_access_block: diff --git a/moto/s3control/exceptions.py b/moto/s3control/exceptions.py index cd69cdcbd..a7975a518 100644 --- a/moto/s3control/exceptions.py +++ b/moto/s3control/exceptions.py @@ -44,5 +44,5 @@ class AccessPointPolicyNotFound(S3ControlError): super().__init__( "NoSuchAccessPointPolicy", "The specified accesspoint policy does not exist", - **kwargs + **kwargs, ) diff --git a/moto/s3control/responses.py b/moto/s3control/responses.py index 140cf5daf..60976731c 100644 --- a/moto/s3control/responses.py +++ b/moto/s3control/responses.py @@ -19,7 +19,9 @@ class S3ControlResponse(BaseResponse): def backend(self) -> S3ControlBackend: return s3control_backends[self.current_account]["global"] - def public_access_block(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore + def public_access_block( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore self.setup_class(request, full_url, headers) try: if request.method == "GET": @@ -68,7 +70,9 @@ class S3ControlResponse(BaseResponse): if request.method == "DELETE": return self.delete_access_point(full_url) - def access_point_policy(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def access_point_policy( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "PUT": return self.create_access_point_policy(full_url) @@ -77,7 +81,9 @@ class S3ControlResponse(BaseResponse): if request.method == "DELETE": return self.delete_access_point_policy(full_url) - def access_point_policy_status(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return] + def access_point_policy_status( + self, request: Any, full_url: str, headers: Any + ) -> TYPE_RESPONSE: # type: ignore[return] self.setup_class(request, full_url, headers) if request.method == "PUT": return self.create_access_point(full_url) diff --git a/moto/s3control/urls.py 
b/moto/s3control/urls.py index 4d1c10d28..3cada4e67 100644 --- a/moto/s3control/urls.py +++ b/moto/s3control/urls.py @@ -1,4 +1,5 @@ """s3control base URL and path.""" + from .responses import S3ControlResponse url_bases = [ diff --git a/moto/sagemaker/models.py b/moto/sagemaker/models.py index c7a406e89..a86a9ad66 100644 --- a/moto/sagemaker/models.py +++ b/moto/sagemaker/models.py @@ -1008,9 +1008,9 @@ class FeatureGroup(BaseObject): "Catalog": "AwsDataCatalog", "Database": "sagemaker_featurestore", } - offline_store_config["S3StorageConfig"][ - "ResolvedOutputS3Uri" - ] = f'{offline_store_config["S3StorageConfig"]["S3Uri"]}/{account_id}/{region_name}/offline-store/{feature_group_name}-{int(datetime.now().timestamp())}/data' + offline_store_config["S3StorageConfig"]["ResolvedOutputS3Uri"] = ( + f'{offline_store_config["S3StorageConfig"]["S3Uri"]}/{account_id}/{region_name}/offline-store/{feature_group_name}-{int(datetime.now().timestamp())}/data' + ) self.offline_store_config = offline_store_config self.role_arn = role_arn @@ -2676,9 +2676,9 @@ class SageMakerModelBackend(BaseBackend): client_request_token=client_request_token, ) - self.pipelines[pipeline_name].pipeline_executions[ - pipeline_execution_arn - ] = fake_pipeline_execution + self.pipelines[pipeline_name].pipeline_executions[pipeline_execution_arn] = ( + fake_pipeline_execution + ) self.pipelines[ pipeline_name ].last_execution_time = fake_pipeline_execution.start_time @@ -3521,12 +3521,12 @@ class SageMakerModelBackend(BaseBackend): client_token=client_token, model_package_type=model_package_type, ) - self.model_package_name_mapping[ - model_package.model_package_name - ] = model_package.model_package_arn - self.model_package_name_mapping[ + self.model_package_name_mapping[model_package.model_package_name] = ( model_package.model_package_arn - ] = model_package.model_package_arn + ) + self.model_package_name_mapping[model_package.model_package_arn] = ( + model_package.model_package_arn + ) self.model_packages[model_package.model_package_arn] = model_package return model_package.model_package_arn diff --git a/moto/sagemakerruntime/models.py b/moto/sagemakerruntime/models.py index d3b42aaf1..264829534 100644 --- a/moto/sagemakerruntime/models.py +++ b/moto/sagemakerruntime/models.py @@ -92,9 +92,9 @@ class SageMakerRuntimeBackend(BaseBackend): s3_backend = s3_backends[self.account_id]["global"] s3_backend.create_bucket(output_bucket, region_name=self.region_name) s3_backend.put_object(output_bucket, output_location, value=output) - self.async_results[endpoint_name][ - input_location - ] = f"s3://{output_bucket}/{output_location}" + self.async_results[endpoint_name][input_location] = ( + f"s3://{output_bucket}/{output_location}" + ) return self.async_results[endpoint_name][input_location] diff --git a/moto/sagemakerruntime/urls.py b/moto/sagemakerruntime/urls.py index f8b6976ae..e10ad553a 100644 --- a/moto/sagemakerruntime/urls.py +++ b/moto/sagemakerruntime/urls.py @@ -1,4 +1,5 @@ """sagemakerruntime base URL and path.""" + from .responses import SageMakerRuntimeResponse url_bases = [ diff --git a/moto/scheduler/exceptions.py b/moto/scheduler/exceptions.py index fcf49f736..05f999294 100644 --- a/moto/scheduler/exceptions.py +++ b/moto/scheduler/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the scheduler service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/scheduler/models.py b/moto/scheduler/models.py index b1ade5c0f..f65e07bb1 100644 --- a/moto/scheduler/models.py +++ 
b/moto/scheduler/models.py @@ -1,4 +1,5 @@ """EventBridgeSchedulerBackend class with methods for supported APIs.""" + from typing import Any, Dict, Iterable, List, Optional from moto.core.base_backend import BackendDict, BaseBackend diff --git a/moto/scheduler/responses.py b/moto/scheduler/responses.py index 6f6c160c2..cbd6ed3ad 100644 --- a/moto/scheduler/responses.py +++ b/moto/scheduler/responses.py @@ -1,4 +1,5 @@ """Handles incoming scheduler requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote diff --git a/moto/scheduler/urls.py b/moto/scheduler/urls.py index 3cc3c435b..00c59b233 100644 --- a/moto/scheduler/urls.py +++ b/moto/scheduler/urls.py @@ -1,4 +1,5 @@ """scheduler base URL and path.""" + from .responses import EventBridgeSchedulerResponse url_bases = [ diff --git a/moto/sdb/exceptions.py b/moto/sdb/exceptions.py index 07eb4e099..ef4b88796 100644 --- a/moto/sdb/exceptions.py +++ b/moto/sdb/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the sdb service.""" + from typing import Any from moto.core.exceptions import RESTError diff --git a/moto/sdb/models.py b/moto/sdb/models.py index eb9829e29..13b49b42b 100644 --- a/moto/sdb/models.py +++ b/moto/sdb/models.py @@ -1,4 +1,5 @@ """SimpleDBBackend class with methods for supported APIs.""" + import re from collections import defaultdict from threading import Lock diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py index 819fcea2f..e1be5a0c8 100644 --- a/moto/secretsmanager/models.py +++ b/moto/secretsmanager/models.py @@ -488,7 +488,6 @@ class SecretsManagerBackend(BaseBackend): kms_key_id: Optional[str] = None, description: Optional[str] = None, ) -> str: - # error if secret does not exist if secret_id not in self.secrets: raise SecretNotFoundException() @@ -562,7 +561,6 @@ class SecretsManagerBackend(BaseBackend): replica_regions: Optional[List[Dict[str, str]]] = None, force_overwrite: bool = False, ) -> Tuple[FakeSecret, bool]: - if version_stages is None: version_stages = ["AWSCURRENT"] @@ -624,7 +622,6 @@ class SecretsManagerBackend(BaseBackend): client_request_token: str, version_stages: List[str], ) -> str: - if not self._is_valid_identifier(secret_id): raise SecretNotFoundException() else: @@ -883,7 +880,6 @@ class SecretsManagerBackend(BaseBackend): recovery_window_in_days: int, force_delete_without_recovery: bool, ) -> Tuple[str, str, float]: - if recovery_window_in_days is not None and ( recovery_window_in_days < 7 or recovery_window_in_days > 30 ): @@ -938,7 +934,6 @@ class SecretsManagerBackend(BaseBackend): return arn, name, self._unix_time_secs(deletion_date) def restore_secret(self, secret_id: str) -> Tuple[str, str]: - if not self._is_valid_identifier(secret_id): raise SecretNotFoundException() @@ -950,7 +945,6 @@ class SecretsManagerBackend(BaseBackend): return secret.arn, secret.name def tag_resource(self, secret_id: str, tags: List[Dict[str, str]]) -> None: - if secret_id not in self.secrets: raise SecretNotFoundException() @@ -973,7 +967,6 @@ class SecretsManagerBackend(BaseBackend): old_tags.append(tag) def untag_resource(self, secret_id: str, tag_keys: List[str]) -> None: - if secret_id not in self.secrets: raise SecretNotFoundException() diff --git a/moto/secretsmanager/utils.py b/moto/secretsmanager/utils.py index 76a17d507..291781842 100644 --- a/moto/secretsmanager/utils.py +++ b/moto/secretsmanager/utils.py @@ -14,7 +14,6 @@ def random_password( include_space: bool, require_each_included_type: bool, ) -> str: - 
password = "" required_characters = "" diff --git a/moto/servicediscovery/exceptions.py b/moto/servicediscovery/exceptions.py index 4615306f3..979aca4e9 100644 --- a/moto/servicediscovery/exceptions.py +++ b/moto/servicediscovery/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the servicediscovery service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/servicediscovery/responses.py b/moto/servicediscovery/responses.py index f8087ad62..54afb29b5 100644 --- a/moto/servicediscovery/responses.py +++ b/moto/servicediscovery/responses.py @@ -1,4 +1,5 @@ """Handles incoming servicediscovery requests, invokes methods, returns responses.""" + import json from moto.core.common_types import TYPE_RESPONSE diff --git a/moto/servicediscovery/urls.py b/moto/servicediscovery/urls.py index 53e1b0ffb..5da02f027 100644 --- a/moto/servicediscovery/urls.py +++ b/moto/servicediscovery/urls.py @@ -1,4 +1,5 @@ """servicediscovery base URL and path.""" + from .responses import ServiceDiscoveryResponse url_bases = [ diff --git a/moto/servicequotas/exceptions.py b/moto/servicequotas/exceptions.py index b84cae248..1f7f20a99 100644 --- a/moto/servicequotas/exceptions.py +++ b/moto/servicequotas/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the servicequotas service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/servicequotas/responses.py b/moto/servicequotas/responses.py index eb4975a16..2e35d4e9c 100644 --- a/moto/servicequotas/responses.py +++ b/moto/servicequotas/responses.py @@ -1,4 +1,5 @@ """Handles incoming servicequotas requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/servicequotas/urls.py b/moto/servicequotas/urls.py index a6a870cf6..72ca729e2 100644 --- a/moto/servicequotas/urls.py +++ b/moto/servicequotas/urls.py @@ -1,4 +1,5 @@ """servicequotas base URL and path.""" + from .responses import ServiceQuotasResponse url_bases = [ diff --git a/moto/ses/feedback.py b/moto/ses/feedback.py index 2d55e1f33..3b1715d93 100644 --- a/moto/ses/feedback.py +++ b/moto/ses/feedback.py @@ -2,6 +2,7 @@ SES Feedback messages Extracted from https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html """ + COMMON_MAIL = { "notificationType": "Bounce, Complaint, or Delivery.", "mail": { diff --git a/moto/ses/responses.py b/moto/ses/responses.py index a16b2460c..85f72eb63 100644 --- a/moto/ses/responses.py +++ b/moto/ses/responses.py @@ -142,7 +142,10 @@ class EmailResponse(BaseResponse): destinations.append({"Destination": destination}) message = self.backend.send_bulk_templated_email( - source, template, template_data, destinations # type: ignore + source, + template, + template_data, + destinations, # type: ignore ) template = self.response_template(SEND_BULK_TEMPLATED_EMAIL_RESPONSE) result = template.render(message=message) @@ -221,9 +224,7 @@ class EmailResponse(BaseResponse): configuration_set_name = self._get_param("ConfigurationSetName") is_configuration_event_enabled = self.querystring.get( "EventDestination.Enabled" - )[ - 0 - ] # type: ignore + )[0] # type: ignore configuration_event_name = self.querystring.get("EventDestination.Name")[0] # type: ignore event_topic_arn = self.querystring.get( # type: ignore "EventDestination.SNSDestination.TopicARN" diff --git a/moto/ses/template.py b/moto/ses/template.py index 61f686abd..91f3a0474 100644 --- a/moto/ses/template.py +++ b/moto/ses/template.py @@ -45,7 +45,9 @@ class EachBlockProcessor(BlockProcessor): 
self.tokenizer.skip_white_space() _processor = get_processor(self.tokenizer)( - self.template, template_data, self.tokenizer # type: ignore + self.template, + template_data, + self.tokenizer, # type: ignore ) # If we've reached the end, we should stop processing # Our parent will continue with whatever comes after {{/each}} diff --git a/moto/sesv2/responses.py b/moto/sesv2/responses.py index d37b8001c..124f7db71 100644 --- a/moto/sesv2/responses.py +++ b/moto/sesv2/responses.py @@ -1,4 +1,5 @@ """Handles incoming sesv2 requests, invokes methods, returns responses.""" + import base64 import json from typing import List diff --git a/moto/sesv2/urls.py b/moto/sesv2/urls.py index 347b88d96..652a30b21 100644 --- a/moto/sesv2/urls.py +++ b/moto/sesv2/urls.py @@ -1,4 +1,5 @@ """sesv2 base URL and path.""" + from .responses import SESV2Response url_bases = [ diff --git a/moto/signer/responses.py b/moto/signer/responses.py index 9ac758325..2918d546b 100644 --- a/moto/signer/responses.py +++ b/moto/signer/responses.py @@ -1,4 +1,5 @@ """Handles incoming signer requests, invokes methods, returns responses.""" + import json from typing import Any from urllib.parse import unquote diff --git a/moto/signer/urls.py b/moto/signer/urls.py index a7cc666ee..0ed2612dd 100644 --- a/moto/signer/urls.py +++ b/moto/signer/urls.py @@ -1,4 +1,5 @@ """signer base URL and path.""" + from .responses import SignerResponse url_bases = [ diff --git a/moto/sns/responses.py b/moto/sns/responses.py index 3eb0352de..8860c0dfc 100644 --- a/moto/sns/responses.py +++ b/moto/sns/responses.py @@ -181,9 +181,9 @@ class SNSResponse(BaseResponse): attributes["KmsMasterKeyId"] = topic.kms_master_key_id if topic.fifo_topic == "true": attributes["FifoTopic"] = topic.fifo_topic - attributes[ - "ContentBasedDeduplication" - ] = topic.content_based_deduplication + attributes["ContentBasedDeduplication"] = ( + topic.content_based_deduplication + ) response = { "GetTopicAttributesResponse": { "GetTopicAttributesResult": {"Attributes": attributes}, diff --git a/moto/sns/utils.py b/moto/sns/utils.py index e71fa43d1..fb6c0ed53 100644 --- a/moto/sns/utils.py +++ b/moto/sns/utils.py @@ -406,7 +406,6 @@ class FilterPolicyMatcher: elif keyword == "numeric" and isinstance(value, list): if attributes_based_check: if dict_body.get(field, {}).get("Type", "") == "Number": - checks = value numeric_ranges = zip(checks[0::2], checks[1::2]) if _numeric_match( diff --git a/moto/sqs/responses.py b/moto/sqs/responses.py index 23c2dc84e..613a4594c 100644 --- a/moto/sqs/responses.py +++ b/moto/sqs/responses.py @@ -35,7 +35,7 @@ from .utils import ( def jsonify_error( - method: Callable[["SQSResponse"], Union[str, TYPE_RESPONSE]] + method: Callable[["SQSResponse"], Union[str, TYPE_RESPONSE]], ) -> Callable[["SQSResponse"], Union[str, TYPE_RESPONSE]]: """ The decorator to convert an RESTError to JSON, if necessary @@ -54,7 +54,6 @@ def jsonify_error( class SQSResponse(BaseResponse): - region_regex = re.compile(r"://(.+?)\.queue\.amazonaws\.com") def __init__(self) -> None: @@ -617,9 +616,9 @@ class SQSResponse(BaseResponse): message.approximate_first_receive_timestamp ) if attributes["message_deduplication_id"]: - msg["Attributes"][ - "MessageDeduplicationId" - ] = message.deduplication_id + msg["Attributes"]["MessageDeduplicationId"] = ( + message.deduplication_id + ) if attributes["message_group_id"] and message.group_id is not None: msg["Attributes"]["MessageGroupId"] = message.group_id if message.system_attributes and message.system_attributes.get( diff --git 
a/moto/ssm/models.py b/moto/ssm/models.py index b3e92af19..7395cbff3 100644 --- a/moto/ssm/models.py +++ b/moto/ssm/models.py @@ -374,7 +374,7 @@ MAX_TIMEOUT_SECONDS = 3600 def generate_ssm_doc_param_list( - parameters: Dict[str, Any] + parameters: Dict[str, Any], ) -> Optional[List[Dict[str, Any]]]: if not parameters: return None @@ -1178,9 +1178,9 @@ class SimpleSystemManagerBackend(BaseBackend): super().__init__(region_name, account_id) self._parameters = ParameterDict(account_id, region_name) - self._resource_tags: DefaultDict[ - str, DefaultDict[str, Dict[str, str]] - ] = defaultdict(lambda: defaultdict(dict)) + self._resource_tags: DefaultDict[str, DefaultDict[str, Dict[str, str]]] = ( + defaultdict(lambda: defaultdict(dict)) + ) self._commands: List[Command] = [] self._errors: List[str] = [] self._documents: Dict[str, Documents] = {} diff --git a/moto/ssoadmin/exceptions.py b/moto/ssoadmin/exceptions.py index 32cc025ef..e9e9705f3 100644 --- a/moto/ssoadmin/exceptions.py +++ b/moto/ssoadmin/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the ssoadmin service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/ssoadmin/urls.py b/moto/ssoadmin/urls.py index 531968fd6..576ef2dc2 100644 --- a/moto/ssoadmin/urls.py +++ b/moto/ssoadmin/urls.py @@ -1,4 +1,5 @@ """ssoadmin base URL and path.""" + from .responses import SSOAdminResponse url_bases = [ diff --git a/moto/stepfunctions/models.py b/moto/stepfunctions/models.py index 62e9b3dcd..0a5e392f1 100644 --- a/moto/stepfunctions/models.py +++ b/moto/stepfunctions/models.py @@ -437,12 +437,12 @@ class StepFunctionBackend(BaseBackend): "\u0007", "\u0008", "\u0009", - "\u000A", - "\u000B", - "\u000C", - "\u000D", - "\u000E", - "\u000F", + "\u000a", + "\u000b", + "\u000c", + "\u000d", + "\u000e", + "\u000f", "\u0010", "\u0011", "\u0012", @@ -453,13 +453,13 @@ class StepFunctionBackend(BaseBackend): "\u0017", "\u0018", "\u0019", - "\u001A", - "\u001B", - "\u001C", - "\u001D", - "\u001E", - "\u001F", - "\u007F", + "\u001a", + "\u001b", + "\u001c", + "\u001d", + "\u001e", + "\u001f", + "\u007f", "\u0080", "\u0081", "\u0082", @@ -470,12 +470,12 @@ class StepFunctionBackend(BaseBackend): "\u0087", "\u0088", "\u0089", - "\u008A", - "\u008B", - "\u008C", - "\u008D", - "\u008E", - "\u008F", + "\u008a", + "\u008b", + "\u008c", + "\u008d", + "\u008e", + "\u008f", "\u0090", "\u0091", "\u0092", @@ -486,12 +486,12 @@ class StepFunctionBackend(BaseBackend): "\u0097", "\u0098", "\u0099", - "\u009A", - "\u009B", - "\u009C", - "\u009D", - "\u009E", - "\u009F", + "\u009a", + "\u009b", + "\u009c", + "\u009d", + "\u009e", + "\u009f", ] accepted_role_arn_format = re.compile( "arn:aws:iam::(?P[0-9]{12}):role/.+" diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicLexer.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicLexer.py index cb0699a78..1339c2a7e 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicLexer.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicLexer.py @@ -3650,7 +3650,6 @@ def serializedATN(): class ASLIntrinsicLexer(Lexer): - atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParser.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParser.py index af786f1a8..8a55ca44f 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParser.py +++ 
b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParser.py @@ -1104,7 +1104,6 @@ def serializedATN(): class ASLIntrinsicParser(Parser): - grammarFileName = "ASLIntrinsicParser.g4" atn = ATNDeserializer().deserialize(serializedATN()) @@ -1314,7 +1313,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def func_decl(self): - localctx = ASLIntrinsicParser.Func_declContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_func_decl) try: @@ -1368,7 +1366,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def states_func_decl(self): - localctx = ASLIntrinsicParser.States_func_declContext( self, self._ctx, self.state ) @@ -1472,7 +1469,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def state_fun_name(self): - localctx = ASLIntrinsicParser.State_fun_nameContext(self, self._ctx, self.state) self.enterRule(localctx, 4, self.RULE_state_fun_name) self._la = 0 # Token type @@ -1480,7 +1476,7 @@ class ASLIntrinsicParser(Parser): self.enterOuterAlt(localctx, 1) self.state = 33 _la = self._input.LA(1) - if not ((((_la) & ~0x3F) == 0 and ((1 << _la) & 274876858368) != 0)): + if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 274876858368) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1538,7 +1534,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def func_arg_list(self): - localctx = ASLIntrinsicParser.Func_arg_listContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_func_arg_list) self._la = 0 # Token type @@ -1771,7 +1766,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def func_arg(self): - localctx = ASLIntrinsicParser.Func_argContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_func_arg) self._la = 0 # Token type @@ -1876,7 +1870,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def context_path(self): - localctx = ASLIntrinsicParser.Context_pathContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_context_path) try: @@ -1939,7 +1932,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def json_path(self): - localctx = ASLIntrinsicParser.Json_pathContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_json_path) self._la = 0 # Token type @@ -2006,7 +1998,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def json_path_part(self): - localctx = ASLIntrinsicParser.Json_path_partContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_json_path_part) try: @@ -2063,7 +2054,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def json_path_iden(self): - localctx = ASLIntrinsicParser.Json_path_idenContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_json_path_iden) try: @@ -2111,7 +2101,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def json_path_iden_qual(self): - localctx = ASLIntrinsicParser.Json_path_iden_qualContext( self, self._ctx, self.state ) @@ -2235,7 +2224,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def json_path_qual(self): - localctx = ASLIntrinsicParser.Json_path_qualContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_json_path_qual) try: @@ -2476,7 +2464,7 @@ class ASLIntrinsicParser(Parser): if token in [9, 10, 15]: self.state = 96 _la = self._input.LA(1) - if not ((((_la) & ~0x3F) == 0 and ((1 << _la) & 34304) != 
0)): + if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 34304) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -2599,7 +2587,6 @@ class ASLIntrinsicParser(Parser): return visitor.visitChildren(self) def identifier(self): - localctx = ASLIntrinsicParser.IdentifierContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_identifier) try: diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserListener.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserListener.py index 38a7e6280..f72bce75a 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserListener.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserListener.py @@ -6,9 +6,9 @@ if "." in __name__: else: from ASLIntrinsicParser import ASLIntrinsicParser + # This class defines a complete listener for a parse tree produced by ASLIntrinsicParser. class ASLIntrinsicParserListener(ParseTreeListener): - # Enter a parse tree produced by ASLIntrinsicParser#func_decl. def enterFunc_decl(self, ctx: ASLIntrinsicParser.Func_declContext): pass diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserVisitor.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserVisitor.py index 987a0683e..6dc6c24fc 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserVisitor.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLIntrinsicParserVisitor.py @@ -10,7 +10,6 @@ else: class ASLIntrinsicParserVisitor(ParseTreeVisitor): - # Visit a parse tree produced by ASLIntrinsicParser#func_decl. def visitFunc_decl(self, ctx: ASLIntrinsicParser.Func_declContext): return self.visitChildren(ctx) diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLLexer.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLLexer.py index 197664fd1..de78347f8 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLLexer.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLLexer.py @@ -19099,7 +19099,6 @@ def serializedATN(): class ASLLexer(Lexer): - atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParser.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParser.py index 2a19a5637..123cca49c 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParser.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParser.py @@ -6752,7 +6752,6 @@ def serializedATN(): class ASLParser(Parser): - grammarFileName = "ASLParser.g4" atn = ATNDeserializer().deserialize(serializedATN()) @@ -7394,7 +7393,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def program_decl(self): - localctx = ASLParser.Program_declContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_program_decl) self._la = 0 # Token type @@ -7468,7 +7466,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def top_layer_stmt(self): - localctx = ASLParser.Top_layer_stmtContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_top_layer_stmt) try: @@ -7547,7 +7544,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def startat_decl(self): - localctx = ASLParser.Startat_declContext(self, self._ctx, self.state) self.enterRule(localctx, 4, self.RULE_startat_decl) try: @@ -7602,7 +7598,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comment_decl(self): - localctx = ASLParser.Comment_declContext(self, self._ctx, self.state) 
self.enterRule(localctx, 6, self.RULE_comment_decl) try: @@ -7657,7 +7652,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def version_decl(self): - localctx = ASLParser.Version_declContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_version_decl) try: @@ -7803,7 +7797,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def state_stmt(self): - localctx = ASLParser.State_stmtContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_state_stmt) try: @@ -8032,7 +8025,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def states_decl(self): - localctx = ASLParser.States_declContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_states_decl) self._la = 0 # Token type @@ -8098,7 +8090,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def state_name(self): - localctx = ASLParser.State_nameContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_state_name) try: @@ -8149,7 +8140,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def state_decl(self): - localctx = ASLParser.State_declContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_state_decl) try: @@ -8213,7 +8203,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def state_decl_body(self): - localctx = ASLParser.State_decl_bodyContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_state_decl_body) self._la = 0 # Token type @@ -8281,7 +8270,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def type_decl(self): - localctx = ASLParser.Type_declContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_type_decl) try: @@ -8336,7 +8324,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def next_decl(self): - localctx = ASLParser.Next_declContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_next_decl) try: @@ -8391,7 +8378,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def resource_decl(self): - localctx = ASLParser.Resource_declContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_resource_decl) try: @@ -8449,7 +8435,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def input_path_decl(self): - localctx = ASLParser.Input_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_input_path_decl) try: @@ -8637,7 +8622,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def result_decl(self): - localctx = ASLParser.Result_declContext(self, self._ctx, self.state) self.enterRule(localctx, 28, self.RULE_result_decl) try: @@ -8695,7 +8679,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def result_path_decl(self): - localctx = ASLParser.Result_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_result_path_decl) try: @@ -8886,7 +8869,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def output_path_decl(self): - localctx = ASLParser.Output_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_output_path_decl) try: @@ -9077,7 +9059,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def end_decl(self): - localctx = ASLParser.End_declContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_end_decl) self._la = 0 # Token type @@ -9138,7 +9119,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def default_decl(self): - localctx = 
ASLParser.Default_declContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_default_decl) try: @@ -9193,7 +9173,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def error_decl(self): - localctx = ASLParser.Error_declContext(self, self._ctx, self.state) self.enterRule(localctx, 38, self.RULE_error_decl) try: @@ -9248,7 +9227,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def cause_decl(self): - localctx = ASLParser.Cause_declContext(self, self._ctx, self.state) self.enterRule(localctx, 40, self.RULE_cause_decl) try: @@ -9303,7 +9281,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def seconds_decl(self): - localctx = ASLParser.Seconds_declContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_seconds_decl) try: @@ -9358,7 +9335,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def seconds_path_decl(self): - localctx = ASLParser.Seconds_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 44, self.RULE_seconds_path_decl) try: @@ -9413,7 +9389,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def timestamp_decl(self): - localctx = ASLParser.Timestamp_declContext(self, self._ctx, self.state) self.enterRule(localctx, 46, self.RULE_timestamp_decl) try: @@ -9468,7 +9443,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def timestamp_path_decl(self): - localctx = ASLParser.Timestamp_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 48, self.RULE_timestamp_path_decl) try: @@ -9523,7 +9497,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def items_path_decl(self): - localctx = ASLParser.Items_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 50, self.RULE_items_path_decl) try: @@ -9578,7 +9551,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def max_concurrency_decl(self): - localctx = ASLParser.Max_concurrency_declContext(self, self._ctx, self.state) self.enterRule(localctx, 52, self.RULE_max_concurrency_decl) try: @@ -9633,7 +9605,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def parameters_decl(self): - localctx = ASLParser.Parameters_declContext(self, self._ctx, self.state) self.enterRule(localctx, 54, self.RULE_parameters_decl) try: @@ -9688,7 +9659,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def timeout_seconds_decl(self): - localctx = ASLParser.Timeout_seconds_declContext(self, self._ctx, self.state) self.enterRule(localctx, 56, self.RULE_timeout_seconds_decl) try: @@ -9743,7 +9713,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def timeout_seconds_path_decl(self): - localctx = ASLParser.Timeout_seconds_path_declContext( self, self._ctx, self.state ) @@ -9800,7 +9769,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def heartbeat_seconds_decl(self): - localctx = ASLParser.Heartbeat_seconds_declContext(self, self._ctx, self.state) self.enterRule(localctx, 60, self.RULE_heartbeat_seconds_decl) try: @@ -9855,7 +9823,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def heartbeat_seconds_path_decl(self): - localctx = ASLParser.Heartbeat_seconds_path_declContext( self, self._ctx, self.state ) @@ -9921,7 +9888,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def payload_tmpl_decl(self): - localctx = ASLParser.Payload_tmpl_declContext(self, self._ctx, self.state) self.enterRule(localctx, 64, self.RULE_payload_tmpl_decl) self._la = 0 # Token type @@ -10103,7 +10069,6 @@ 
class ASLParser(Parser): return visitor.visitChildren(self) def payload_binding(self): - localctx = ASLParser.Payload_bindingContext(self, self._ctx, self.state) self.enterRule(localctx, 66, self.RULE_payload_binding) try: @@ -10196,7 +10161,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def intrinsic_func(self): - localctx = ASLParser.Intrinsic_funcContext(self, self._ctx, self.state) self.enterRule(localctx, 68, self.RULE_intrinsic_func) try: @@ -10256,7 +10220,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def payload_arr_decl(self): - localctx = ASLParser.Payload_arr_declContext(self, self._ctx, self.state) self.enterRule(localctx, 70, self.RULE_payload_arr_decl) self._la = 0 # Token type @@ -10341,7 +10304,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def payload_value_decl(self): - localctx = ASLParser.Payload_value_declContext(self, self._ctx, self.state) self.enterRule(localctx, 72, self.RULE_payload_value_decl) try: @@ -10519,7 +10481,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def payload_value_lit(self): - localctx = ASLParser.Payload_value_litContext(self, self._ctx, self.state) self.enterRule(localctx, 74, self.RULE_payload_value_lit) self._la = 0 # Token type @@ -10730,7 +10691,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def result_selector_decl(self): - localctx = ASLParser.Result_selector_declContext(self, self._ctx, self.state) self.enterRule(localctx, 76, self.RULE_result_selector_decl) try: @@ -10800,7 +10760,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def state_type(self): - localctx = ASLParser.State_typeContext(self, self._ctx, self.state) self.enterRule(localctx, 78, self.RULE_state_type) self._la = 0 # Token type @@ -10808,7 +10767,7 @@ class ASLParser(Parser): self.enterOuterAlt(localctx, 1) self.state = 426 _la = self._input.LA(1) - if not ((((_la) & ~0x3F) == 0 and ((1 << _la) & 16711680) != 0)): + if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 16711680) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -10872,7 +10831,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def choices_decl(self): - localctx = ASLParser.Choices_declContext(self, self._ctx, self.state) self.enterRule(localctx, 80, self.RULE_choices_decl) self._la = 0 # Token type @@ -11010,7 +10968,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def choice_rule(self): - localctx = ASLParser.Choice_ruleContext(self, self._ctx, self.state) self.enterRule(localctx, 82, self.RULE_choice_rule) self._la = 0 # Token type @@ -11114,7 +11071,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comparison_variable_stmt(self): - localctx = ASLParser.Comparison_variable_stmtContext( self, self._ctx, self.state ) @@ -11222,7 +11178,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comparison_composite_stmt(self): - localctx = ASLParser.Comparison_composite_stmtContext( self, self._ctx, self.state ) @@ -11303,7 +11258,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comparison_composite(self): - localctx = ASLParser.Comparison_compositeContext(self, self._ctx, self.state) self.enterRule(localctx, 88, self.RULE_comparison_composite) self._la = 0 # Token type @@ -11387,7 +11341,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def variable_decl(self): - localctx = ASLParser.Variable_declContext(self, self._ctx, self.state) self.enterRule(localctx, 90, 
self.RULE_variable_decl) try: @@ -11442,7 +11395,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comparison_func(self): - localctx = ASLParser.Comparison_funcContext(self, self._ctx, self.state) self.enterRule(localctx, 92, self.RULE_comparison_func) try: @@ -11512,7 +11464,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def branches_decl(self): - localctx = ASLParser.Branches_declContext(self, self._ctx, self.state) self.enterRule(localctx, 94, self.RULE_branches_decl) self._la = 0 # Token type @@ -11599,7 +11550,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def item_processor_decl(self): - localctx = ASLParser.Item_processor_declContext(self, self._ctx, self.state) self.enterRule(localctx, 96, self.RULE_item_processor_decl) self._la = 0 # Token type @@ -11674,7 +11624,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def item_processor_item(self): - localctx = ASLParser.Item_processor_itemContext(self, self._ctx, self.state) self.enterRule(localctx, 98, self.RULE_item_processor_item) try: @@ -11767,7 +11716,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def processor_config_decl(self): - localctx = ASLParser.Processor_config_declContext(self, self._ctx, self.state) self.enterRule(localctx, 100, self.RULE_processor_config_decl) self._la = 0 # Token type @@ -11836,7 +11784,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def processor_config_field(self): - localctx = ASLParser.Processor_config_fieldContext(self, self._ctx, self.state) self.enterRule(localctx, 102, self.RULE_processor_config_field) try: @@ -11900,7 +11847,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def mode_decl(self): - localctx = ASLParser.Mode_declContext(self, self._ctx, self.state) self.enterRule(localctx, 104, self.RULE_mode_decl) try: @@ -11952,7 +11898,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def mode_type(self): - localctx = ASLParser.Mode_typeContext(self, self._ctx, self.state) self.enterRule(localctx, 106, self.RULE_mode_type) self._la = 0 # Token type @@ -12009,7 +11954,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def execution_decl(self): - localctx = ASLParser.Execution_declContext(self, self._ctx, self.state) self.enterRule(localctx, 108, self.RULE_execution_decl) try: @@ -12058,7 +12002,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def execution_type(self): - localctx = ASLParser.Execution_typeContext(self, self._ctx, self.state) self.enterRule(localctx, 110, self.RULE_execution_type) try: @@ -12124,7 +12067,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def iterator_decl(self): - localctx = ASLParser.Iterator_declContext(self, self._ctx, self.state) self.enterRule(localctx, 112, self.RULE_iterator_decl) self._la = 0 # Token type @@ -12196,7 +12138,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def iterator_decl_item(self): - localctx = ASLParser.Iterator_decl_itemContext(self, self._ctx, self.state) self.enterRule(localctx, 114, self.RULE_iterator_decl_item) try: @@ -12265,7 +12206,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def item_selector_decl(self): - localctx = ASLParser.Item_selector_declContext(self, self._ctx, self.state) self.enterRule(localctx, 116, self.RULE_item_selector_decl) try: @@ -12335,7 +12275,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def item_reader_decl(self): - localctx = ASLParser.Item_reader_declContext(self, 
self._ctx, self.state) self.enterRule(localctx, 118, self.RULE_item_reader_decl) self._la = 0 # Token type @@ -12407,7 +12346,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def items_reader_field(self): - localctx = ASLParser.Items_reader_fieldContext(self, self._ctx, self.state) self.enterRule(localctx, 120, self.RULE_items_reader_field) try: @@ -12491,7 +12429,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def reader_config_decl(self): - localctx = ASLParser.Reader_config_declContext(self, self._ctx, self.state) self.enterRule(localctx, 122, self.RULE_reader_config_decl) self._la = 0 # Token type @@ -12571,7 +12508,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def reader_config_field(self): - localctx = ASLParser.Reader_config_fieldContext(self, self._ctx, self.state) self.enterRule(localctx, 124, self.RULE_reader_config_field) try: @@ -12650,7 +12586,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def input_type_decl(self): - localctx = ASLParser.Input_type_declContext(self, self._ctx, self.state) self.enterRule(localctx, 126, self.RULE_input_type_decl) try: @@ -12705,7 +12640,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def csv_header_location_decl(self): - localctx = ASLParser.Csv_header_location_declContext( self, self._ctx, self.state ) @@ -12777,7 +12711,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def csv_headers_decl(self): - localctx = ASLParser.Csv_headers_declContext(self, self._ctx, self.state) self.enterRule(localctx, 130, self.RULE_csv_headers_decl) self._la = 0 # Token type @@ -12849,7 +12782,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def max_items_decl(self): - localctx = ASLParser.Max_items_declContext(self, self._ctx, self.state) self.enterRule(localctx, 132, self.RULE_max_items_decl) try: @@ -12904,7 +12836,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def max_items_path_decl(self): - localctx = ASLParser.Max_items_path_declContext(self, self._ctx, self.state) self.enterRule(localctx, 134, self.RULE_max_items_path_decl) try: @@ -12974,7 +12905,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def retry_decl(self): - localctx = ASLParser.Retry_declContext(self, self._ctx, self.state) self.enterRule(localctx, 136, self.RULE_retry_decl) self._la = 0 # Token type @@ -13059,7 +12989,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def retrier_decl(self): - localctx = ASLParser.Retrier_declContext(self, self._ctx, self.state) self.enterRule(localctx, 138, self.RULE_retrier_decl) self._la = 0 # Token type @@ -13130,7 +13059,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def retrier_stmt(self): - localctx = ASLParser.Retrier_stmtContext(self, self._ctx, self.state) self.enterRule(localctx, 140, self.RULE_retrier_stmt) try: @@ -13219,7 +13147,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def error_equals_decl(self): - localctx = ASLParser.Error_equals_declContext(self, self._ctx, self.state) self.enterRule(localctx, 142, self.RULE_error_equals_decl) self._la = 0 # Token type @@ -13291,7 +13218,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def interval_seconds_decl(self): - localctx = ASLParser.Interval_seconds_declContext(self, self._ctx, self.state) self.enterRule(localctx, 144, self.RULE_interval_seconds_decl) try: @@ -13346,7 +13272,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def max_attempts_decl(self): - localctx = 
ASLParser.Max_attempts_declContext(self, self._ctx, self.state) self.enterRule(localctx, 146, self.RULE_max_attempts_decl) try: @@ -13404,7 +13329,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def backoff_rate_decl(self): - localctx = ASLParser.Backoff_rate_declContext(self, self._ctx, self.state) self.enterRule(localctx, 148, self.RULE_backoff_rate_decl) self._la = 0 # Token type @@ -13480,7 +13404,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def catch_decl(self): - localctx = ASLParser.Catch_declContext(self, self._ctx, self.state) self.enterRule(localctx, 150, self.RULE_catch_decl) self._la = 0 # Token type @@ -13565,7 +13488,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def catcher_decl(self): - localctx = ASLParser.Catcher_declContext(self, self._ctx, self.state) self.enterRule(localctx, 152, self.RULE_catcher_decl) self._la = 0 # Token type @@ -13633,7 +13555,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def catcher_stmt(self): - localctx = ASLParser.Catcher_stmtContext(self, self._ctx, self.state) self.enterRule(localctx, 154, self.RULE_catcher_stmt) try: @@ -13810,7 +13731,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def comparison_op(self): - localctx = ASLParser.Comparison_opContext(self, self._ctx, self.state) self.enterRule(localctx, 156, self.RULE_comparison_op) self._la = 0 # Token type @@ -13819,10 +13739,7 @@ class ASLParser(Parser): self.state = 735 _la = self._input.LA(1) if not ( - ( - (((_la - 29)) & ~0x3F) == 0 - and ((1 << (_la - 29)) & 2199022731007) != 0 - ) + ((_la - 29) & ~0x3F) == 0 and ((1 << (_la - 29)) & 2199022731007) != 0 ): self._errHandler.recoverInline(self) else: @@ -13872,7 +13789,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def choice_operator(self): - localctx = ASLParser.Choice_operatorContext(self, self._ctx, self.state) self.enterRule(localctx, 158, self.RULE_choice_operator) self._la = 0 # Token type @@ -13880,7 +13796,7 @@ class ASLParser(Parser): self.enterOuterAlt(localctx, 1) self.state = 737 _la = self._input.LA(1) - if not ((((_la) & ~0x3F) == 0 and ((1 << _la) & 281612684099584) != 0)): + if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 281612684099584) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -13964,7 +13880,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def states_error_name(self): - localctx = ASLParser.States_error_nameContext(self, self._ctx, self.state) self.enterRule(localctx, 160, self.RULE_states_error_name) self._la = 0 # Token type @@ -13972,9 +13887,7 @@ class ASLParser(Parser): self.enterOuterAlt(localctx, 1) self.state = 739 _la = self._input.LA(1) - if not ( - ((((_la - 113)) & ~0x3F) == 0 and ((1 << (_la - 113)) & 16383) != 0) - ): + if not (((_la - 113) & ~0x3F) == 0 and ((1 << (_la - 113)) & 16383) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -14020,7 +13933,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def error_name(self): - localctx = ASLParser.Error_nameContext(self, self._ctx, self.state) self.enterRule(localctx, 162, self.RULE_error_name) try: @@ -14092,7 +14004,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def json_obj_decl(self): - localctx = ASLParser.Json_obj_declContext(self, self._ctx, self.state) self.enterRule(localctx, 164, self.RULE_json_obj_decl) self._la = 0 # Token type @@ -14174,7 +14085,6 @@ class ASLParser(Parser): return 
visitor.visitChildren(self) def json_binding(self): - localctx = ASLParser.Json_bindingContext(self, self._ctx, self.state) self.enterRule(localctx, 166, self.RULE_json_binding) try: @@ -14238,7 +14148,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def json_arr_decl(self): - localctx = ASLParser.Json_arr_declContext(self, self._ctx, self.state) self.enterRule(localctx, 168, self.RULE_json_arr_decl) self._la = 0 # Token type @@ -14338,7 +14247,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def json_value_decl(self): - localctx = ASLParser.Json_value_declContext(self, self._ctx, self.state) self.enterRule(localctx, 170, self.RULE_json_value_decl) try: @@ -14796,7 +14704,6 @@ class ASLParser(Parser): return visitor.visitChildren(self) def keyword_or_string(self): - localctx = ASLParser.Keyword_or_stringContext(self, self._ctx, self.state) self.enterRule(localctx, 172, self.RULE_keyword_or_string) self._la = 0 # Token type @@ -14805,9 +14712,9 @@ class ASLParser(Parser): self.state = 790 _la = self._input.LA(1) if not ( - ((((_la - 10)) & ~0x3F) == 0 and ((1 << (_la - 10)) & -17) != 0) + (((_la - 10) & ~0x3F) == 0 and ((1 << (_la - 10)) & -17) != 0) or ( - (((_la - 74)) & ~0x3F) == 0 + ((_la - 74) & ~0x3F) == 0 and ((1 << (_la - 74)) & 144115188075855871) != 0 ) ): diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserListener.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserListener.py index 0236c85fb..840ff05aa 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserListener.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserListener.py @@ -6,9 +6,9 @@ if "." in __name__: else: from ASLParser import ASLParser + # This class defines a complete listener for a parse tree produced by ASLParser. class ASLParserListener(ParseTreeListener): - # Enter a parse tree produced by ASLParser#program_decl. def enterProgram_decl(self, ctx: ASLParser.Program_declContext): pass diff --git a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserVisitor.py b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserVisitor.py index 6b2c059b7..1ae03bb4f 100644 --- a/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserVisitor.py +++ b/moto/stepfunctions/parser/asl/antlr/runtime/ASLParserVisitor.py @@ -10,7 +10,6 @@ else: class ASLParserVisitor(ParseTreeVisitor): - # Visit a parse tree produced by ASLParser#program_decl. def visitProgram_decl(self, ctx: ASLParser.Program_declContext): return self.visitChildren(ctx) diff --git a/moto/stepfunctions/parser/asl/component/common/catch/catcher_outcome.py b/moto/stepfunctions/parser/asl/component/common/catch/catcher_outcome.py index d37768355..83166764b 100644 --- a/moto/stepfunctions/parser/asl/component/common/catch/catcher_outcome.py +++ b/moto/stepfunctions/parser/asl/component/common/catch/catcher_outcome.py @@ -1,8 +1,7 @@ import abc -class CatcherOutcome(abc.ABC): - ... +class CatcherOutcome(abc.ABC): ... 
class CatcherOutcomeCaught(CatcherOutcome): diff --git a/moto/stepfunctions/parser/asl/component/common/error_name/error_equals_decl.py b/moto/stepfunctions/parser/asl/component/common/error_name/error_equals_decl.py index b8fb636d3..327c16f21 100644 --- a/moto/stepfunctions/parser/asl/component/common/error_name/error_equals_decl.py +++ b/moto/stepfunctions/parser/asl/component/common/error_name/error_equals_decl.py @@ -52,8 +52,9 @@ class ErrorEqualsDecl(EvalComponent): if ErrorEqualsDecl._STATE_ALL_ERROR in self.error_names: res = True - elif ErrorEqualsDecl._STATE_TASK_ERROR in self.error_names and not isinstance( - error_name, StatesErrorName + elif ( + ErrorEqualsDecl._STATE_TASK_ERROR in self.error_names + and not isinstance(error_name, StatesErrorName) ): # TODO: consider binding a 'context' variable to error_names to more formally detect their evaluation type. res = True else: diff --git a/moto/stepfunctions/parser/asl/component/common/error_name/states_error_name_type.py b/moto/stepfunctions/parser/asl/component/common/error_name/states_error_name_type.py index 29c50f4fa..4b2f02b3e 100644 --- a/moto/stepfunctions/parser/asl/component/common/error_name/states_error_name_type.py +++ b/moto/stepfunctions/parser/asl/component/common/error_name/states_error_name_type.py @@ -47,6 +47,6 @@ def _reverse_error_name_lookup() -> Dict[str, StatesErrorNameType]: return lookup -_REVERSE_NAME_LOOKUP: Final[ - Dict[str, StatesErrorNameType] -] = _reverse_error_name_lookup() +_REVERSE_NAME_LOOKUP: Final[Dict[str, StatesErrorNameType]] = ( + _reverse_error_name_lookup() +) diff --git a/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payload_value.py b/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payload_value.py index a784f507a..8bb3733f1 100644 --- a/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payload_value.py +++ b/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payload_value.py @@ -3,5 +3,4 @@ import abc from moto.stepfunctions.parser.asl.component.eval_component import EvalComponent -class PayloadValue(EvalComponent, abc.ABC): - ... +class PayloadValue(EvalComponent, abc.ABC): ... diff --git a/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payloadbinding/payload_binding.py b/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payloadbinding/payload_binding.py index ac80eb192..020e11e94 100644 --- a/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payloadbinding/payload_binding.py +++ b/moto/stepfunctions/parser/asl/component/common/payload/payloadvalue/payloadbinding/payload_binding.py @@ -12,8 +12,7 @@ class PayloadBinding(PayloadValue, abc.ABC): self.field: Final[str] = field @abc.abstractmethod - def _eval_val(self, env: Environment) -> Any: - ... + def _eval_val(self, env: Environment) -> Any: ... def _eval_body(self, env: Environment) -> None: cnt: dict = env.stack.pop() diff --git a/moto/stepfunctions/parser/asl/component/common/timeouts/heartbeat.py b/moto/stepfunctions/parser/asl/component/common/timeouts/heartbeat.py index 60591c9ba..16c9e506a 100644 --- a/moto/stepfunctions/parser/asl/component/common/timeouts/heartbeat.py +++ b/moto/stepfunctions/parser/asl/component/common/timeouts/heartbeat.py @@ -8,8 +8,7 @@ from moto.stepfunctions.parser.asl.utils.json_path import JSONPathUtils class Heartbeat(EvalComponent, abc.ABC): @abc.abstractmethod - def _eval_seconds(self, env: Environment) -> int: - ... 
+ def _eval_seconds(self, env: Environment) -> int: ... def _eval_body(self, env: Environment) -> None: seconds = self._eval_seconds(env=env) diff --git a/moto/stepfunctions/parser/asl/component/common/timeouts/timeout.py b/moto/stepfunctions/parser/asl/component/common/timeouts/timeout.py index 60f1e5880..23056a71d 100644 --- a/moto/stepfunctions/parser/asl/component/common/timeouts/timeout.py +++ b/moto/stepfunctions/parser/asl/component/common/timeouts/timeout.py @@ -8,12 +8,10 @@ from moto.stepfunctions.parser.asl.utils.json_path import JSONPathUtils class Timeout(EvalComponent, abc.ABC): @abc.abstractmethod - def is_default_value(self) -> bool: - ... + def is_default_value(self) -> bool: ... @abc.abstractmethod - def _eval_seconds(self, env: Environment) -> int: - ... + def _eval_seconds(self, env: Environment) -> int: ... def _eval_body(self, env: Environment) -> None: seconds = self._eval_seconds(env=env) diff --git a/moto/stepfunctions/parser/asl/component/intrinsic/member.py b/moto/stepfunctions/parser/asl/component/intrinsic/member.py index 05174b958..b4c5f8392 100644 --- a/moto/stepfunctions/parser/asl/component/intrinsic/member.py +++ b/moto/stepfunctions/parser/asl/component/intrinsic/member.py @@ -3,8 +3,7 @@ from typing import Final from moto.stepfunctions.parser.asl.component.intrinsic.component import Component -class Member(Component): - ... +class Member(Component): ... class IdentifiedMember(Member): diff --git a/moto/stepfunctions/parser/asl/component/state/state.py b/moto/stepfunctions/parser/asl/component/state/state.py index 2cc1e2128..52d407fff 100644 --- a/moto/stepfunctions/parser/asl/component/state/state.py +++ b/moto/stepfunctions/parser/asl/component/state/state.py @@ -60,12 +60,12 @@ class CommonStateField(EvalComponent, ABC): # If omitted, it has the value $ which designates the entire output. self.output_path: OutputPath = OutputPath(OutputPath.DEFAULT_PATH) - self.state_entered_event_type: Final[ - HistoryEventType - ] = state_entered_event_type - self.state_exited_event_type: Final[ - Optional[HistoryEventType] - ] = state_exited_event_type + self.state_entered_event_type: Final[HistoryEventType] = ( + state_entered_event_type + ) + self.state_exited_event_type: Final[Optional[HistoryEventType]] = ( + state_exited_event_type + ) def from_state_props(self, state_props: StateProps) -> None: self.name = state_props.name @@ -122,8 +122,7 @@ class CommonStateField(EvalComponent, ABC): ) @abc.abstractmethod - def _eval_state(self, env: Environment) -> None: - ... + def _eval_state(self, env: Environment) -> None: ... def _eval_body(self, env: Environment) -> None: env.event_history.add_event( diff --git a/moto/stepfunctions/parser/asl/component/state/state_choice/comparison/comparison.py b/moto/stepfunctions/parser/asl/component/state/state_choice/comparison/comparison.py index 03fb7ca68..726e99e7d 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_choice/comparison/comparison.py +++ b/moto/stepfunctions/parser/asl/component/state/state_choice/comparison/comparison.py @@ -3,5 +3,4 @@ from abc import ABC from moto.stepfunctions.parser.asl.component.eval_component import EvalComponent -class Comparison(EvalComponent, ABC): - ... +class Comparison(EvalComponent, ABC): ... 
diff --git a/moto/stepfunctions/parser/asl/component/state/state_continue_with.py b/moto/stepfunctions/parser/asl/component/state/state_continue_with.py index 1c7b1f891..d8c601941 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_continue_with.py +++ b/moto/stepfunctions/parser/asl/component/state/state_continue_with.py @@ -3,8 +3,7 @@ import abc from moto.stepfunctions.parser.asl.component.common.flow.next import Next -class ContinueWith(abc.ABC): - ... +class ContinueWith(abc.ABC): ... class ContinueWithEnd(ContinueWith): diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/execute_state.py b/moto/stepfunctions/parser/asl/component/state/state_execution/execute_state.py index b5ab7f845..a37e756ea 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/execute_state.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/execute_state.py @@ -147,8 +147,7 @@ class ExecutionState(CommonStateField, abc.ABC): ) @abc.abstractmethod - def _eval_execution(self, env: Environment) -> None: - ... + def _eval_execution(self, env: Environment) -> None: ... def _handle_retry( self, env: Environment, failure_event: FailureEvent diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/max_items_decl.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/max_items_decl.py index cf086c2f4..15caa4d67 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/max_items_decl.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/max_items_decl.py @@ -9,8 +9,7 @@ from moto.stepfunctions.parser.asl.eval.environment import Environment class MaxItemsDecl(EvalComponent, abc.ABC): @abc.abstractmethod - def _get_value(self, env: Environment) -> int: - ... + def _get_value(self, env: Environment) -> int: ... 
def _eval_body(self, env: Environment) -> None: max_items: int = self._get_value(env=env) diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/reader_config_decl.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/reader_config_decl.py index b3abb871e..e1d2a7e28 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/reader_config_decl.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/reader_config/reader_config_decl.py @@ -68,9 +68,9 @@ class ReaderConfig(EvalComponent): MaxItemsValue=max_items_value, ) if self.csv_header_location: - reader_config_output[ - "CSVHeaderLocation" - ] = self.csv_header_location.csv_header_location_value.value + reader_config_output["CSVHeaderLocation"] = ( + self.csv_header_location.csv_header_location_value.value + ) if self.csv_headers: reader_config_output["CSVHeaders"] = self.csv_headers.header_names env.stack.append(reader_config_output) diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_eval.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_eval.py index 955a4ba9a..e005262d6 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_eval.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_eval.py @@ -13,5 +13,4 @@ class ResourceEval(abc.ABC): def __init__(self, resource: ServiceResource): self.resource = resource - def eval_resource(self, env: Environment) -> None: - ... + def eval_resource(self, env: Environment) -> None: ... diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_output_transformer/resource_output_transformer.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_output_transformer/resource_output_transformer.py index cc5225df9..69247f691 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_output_transformer/resource_output_transformer.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/item_reader/resource_eval/resource_output_transformer/resource_output_transformer.py @@ -3,5 +3,4 @@ import abc from moto.stepfunctions.parser.asl.component.eval_component import EvalComponent -class ResourceOutputTransformer(EvalComponent, abc.ABC): - ... +class ResourceOutputTransformer(EvalComponent, abc.ABC): ... diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/distributed_iteration_component.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/distributed_iteration_component.py index 5181b852b..8afe928e2 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/distributed_iteration_component.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/distributed_iteration_component.py @@ -66,8 +66,7 @@ class DistributedIterationComponent(InlineIterationComponent, abc.ABC): self._workers = list() @abc.abstractmethod - def _create_worker(self, env: Environment) -> IterationWorker: - ... 
+ def _create_worker(self, env: Environment) -> IterationWorker: ... def _launch_worker(self, env: Environment) -> IterationWorker: worker = super()._launch_worker(env=env) diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/inline_iteration_component.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/inline_iteration_component.py index 4791c89ab..35b43e6d1 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/inline_iteration_component.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/inline_iteration_component.py @@ -52,8 +52,7 @@ class InlineIterationComponent(IterationComponent, abc.ABC): self._job_pool = None @abc.abstractmethod - def _create_worker(self, env: Environment) -> IterationWorker: - ... + def _create_worker(self, env: Environment) -> IterationWorker: ... def _launch_worker(self, env: Environment) -> IterationWorker: worker = self._create_worker(env=env) diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/iteration_worker.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/iteration_worker.py index ff5619be5..9bd11af37 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/iteration_worker.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_map/iteration/iteration_worker.py @@ -50,8 +50,7 @@ class IterationWorker(abc.ABC): return self._stop_signal_received @abc.abstractmethod - def _eval_input(self, env_frame: Environment) -> None: - ... + def _eval_input(self, env_frame: Environment) -> None: ... def _eval_job(self, env: Environment, job: Job) -> None: map_iteration_event_details = MapIterationEventDetails( diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branch_worker.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branch_worker.py index 21925e224..e023fd289 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branch_worker.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branch_worker.py @@ -14,8 +14,7 @@ LOG = logging.getLogger(__name__) class BranchWorker: class BranchWorkerComm(abc.ABC): @abc.abstractmethod - def on_terminated(self, env: Environment): - ... + def on_terminated(self, env: Environment): ... _branch_worker_comm: Final[BranchWorkerComm] _program: Final[Program] diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branches_decl.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branches_decl.py index a83457f96..b6a362cfb 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branches_decl.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_parallel/branches_decl.py @@ -82,9 +82,9 @@ class BranchesDecl(EvalComponent): branch_worker_pool.wait() # Propagate exception if parallel task failed. 
- exit_event_details: Optional[ - ExecutionFailedEventDetails - ] = branch_worker_pool.get_exit_event_details() + exit_event_details: Optional[ExecutionFailedEventDetails] = ( + branch_worker_pool.get_exit_event_details() + ) if exit_event_details is not None: for branch_worker in branch_workers: branch_worker.stop( diff --git a/moto/stepfunctions/parser/asl/component/state/state_execution/state_task/service/state_task_service.py b/moto/stepfunctions/parser/asl/component/state/state_execution/state_task/service/state_task_service.py index 1d1179766..c4cb47602 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_execution/state_task/service/state_task_service.py +++ b/moto/stepfunctions/parser/asl/component/state/state_execution/state_task/service/state_task_service.py @@ -75,9 +75,9 @@ class StateTaskService(StateTask, abc.ABC): parameters_bind_keys: List[str] = list(parameters.keys()) for parameter_key in parameters_bind_keys: norm_parameter_key = camel_to_snake_case(parameter_key) - norm_member_bind: Optional[ - Tuple[str, Optional[StructureShape]] - ] = norm_member_binds.get(norm_parameter_key) + norm_member_bind: Optional[Tuple[str, Optional[StructureShape]]] = ( + norm_member_binds.get(norm_parameter_key) + ) if norm_member_bind is not None: norm_member_bind_key, norm_member_bind_shape = norm_member_bind parameter_value = parameters.pop(parameter_key) @@ -143,8 +143,7 @@ class StateTaskService(StateTask, abc.ABC): env: Environment, resource_runtime_part: ResourceRuntimePart, normalised_parameters: dict, - ): - ... + ): ... def _before_eval_execution( self, diff --git a/moto/stepfunctions/parser/asl/component/state/state_wait/wait_function/wait_function.py b/moto/stepfunctions/parser/asl/component/state/state_wait/wait_function/wait_function.py index 01a9233e2..0adec4e01 100644 --- a/moto/stepfunctions/parser/asl/component/state/state_wait/wait_function/wait_function.py +++ b/moto/stepfunctions/parser/asl/component/state/state_wait/wait_function/wait_function.py @@ -10,8 +10,7 @@ LOG = logging.getLogger(__name__) class WaitFunction(EvalComponent, abc.ABC): @abc.abstractmethod - def _get_wait_seconds(self, env: Environment) -> int: - ... + def _get_wait_seconds(self, env: Environment) -> int: ... def _wait_interval(self, env: Environment, wait_seconds: int) -> None: t0 = time.time() diff --git a/moto/stepfunctions/parser/asl/eval/callback/callback.py b/moto/stepfunctions/parser/asl/eval/callback/callback.py index 24569ac47..739161b2a 100644 --- a/moto/stepfunctions/parser/asl/eval/callback/callback.py +++ b/moto/stepfunctions/parser/asl/eval/callback/callback.py @@ -37,8 +37,7 @@ class CallbackTimeoutError(TimeoutError): pass -class CallbackConsumerError(abc.ABC): - ... +class CallbackConsumerError(abc.ABC): ... 
class CallbackConsumerTimeout(CallbackConsumerError): @@ -150,9 +149,9 @@ class CallbackPoolManager: if callback_endpoint is None: return False - consumer_error: Optional[ - CallbackConsumerError - ] = callback_endpoint.consumer_error + consumer_error: Optional[CallbackConsumerError] = ( + callback_endpoint.consumer_error + ) if consumer_error is not None: raise CallbackNotifyConsumerError(callback_consumer_error=consumer_error) @@ -164,9 +163,9 @@ class CallbackPoolManager: if callback_endpoint is None: return False - consumer_error: Optional[ - CallbackConsumerError - ] = callback_endpoint.consumer_error + consumer_error: Optional[CallbackConsumerError] = ( + callback_endpoint.consumer_error + ) if consumer_error is not None: raise CallbackNotifyConsumerError(callback_consumer_error=consumer_error) diff --git a/moto/stepfunctions/parser/asl/eval/program_state.py b/moto/stepfunctions/parser/asl/eval/program_state.py index 5a91bcfa8..65c1c6c70 100644 --- a/moto/stepfunctions/parser/asl/eval/program_state.py +++ b/moto/stepfunctions/parser/asl/eval/program_state.py @@ -4,8 +4,7 @@ from typing import Final, Optional from moto.stepfunctions.parser.api import ExecutionFailedEventDetails, Timestamp -class ProgramState(abc.ABC): - ... +class ProgramState(abc.ABC): ... class ProgramEnded(ProgramState): diff --git a/moto/stepfunctions/parser/backend/execution_worker_comm.py b/moto/stepfunctions/parser/backend/execution_worker_comm.py index 4602d1339..cb82687d7 100644 --- a/moto/stepfunctions/parser/backend/execution_worker_comm.py +++ b/moto/stepfunctions/parser/backend/execution_worker_comm.py @@ -9,5 +9,4 @@ class ExecutionWorkerComm(abc.ABC): """ @abc.abstractmethod - def terminated(self) -> None: - ... + def terminated(self) -> None: ... diff --git a/moto/stepfunctions/parser/backend/state_machine.py b/moto/stepfunctions/parser/backend/state_machine.py index 93863677a..d04494bcb 100644 --- a/moto/stepfunctions/parser/backend/state_machine.py +++ b/moto/stepfunctions/parser/backend/state_machine.py @@ -78,8 +78,7 @@ class StateMachineInstance: return describe_output @abc.abstractmethod - def itemise(self): - ... + def itemise(self): ... 
class TagManager: diff --git a/moto/stepfunctions/parser/models.py b/moto/stepfunctions/parser/models.py index eed8a4fd8..a2c1af363 100644 --- a/moto/stepfunctions/parser/models.py +++ b/moto/stepfunctions/parser/models.py @@ -84,7 +84,6 @@ class StepFunctionsParserBackend(StepFunctionBackend): roleArn: str, tags: Optional[List[Dict[str, str]]] = None, ) -> StateMachine: - StepFunctionsParserBackend._validate_definition(definition=definition) return super().create_state_machine( @@ -208,9 +207,9 @@ class StepFunctionsParserBackend(StepFunctionBackend): def describe_map_run(self, map_run_arn: str) -> Dict[str, Any]: for execution in self._get_executions(): - map_run_record: Optional[ - MapRunRecord - ] = execution.exec_worker.env.map_run_record_pool_manager.get(map_run_arn) + map_run_record: Optional[MapRunRecord] = ( + execution.exec_worker.env.map_run_record_pool_manager.get(map_run_arn) + ) if map_run_record is not None: return map_run_record.describe() raise ResourceNotFound() @@ -220,9 +219,9 @@ class StepFunctionsParserBackend(StepFunctionBackend): Pagination is not yet implemented """ execution = self.describe_execution(execution_arn=execution_arn) - map_run_records: List[ - MapRunRecord - ] = execution.exec_worker.env.map_run_record_pool_manager.get_all() + map_run_records: List[MapRunRecord] = ( + execution.exec_worker.env.map_run_record_pool_manager.get_all() + ) return dict( mapRuns=[map_run_record.to_json() for map_run_record in map_run_records] ) diff --git a/moto/stepfunctions/responses.py b/moto/stepfunctions/responses.py index a505076e7..4ad3aa1be 100644 --- a/moto/stepfunctions/responses.py +++ b/moto/stepfunctions/responses.py @@ -14,7 +14,9 @@ class StepFunctionResponse(BaseResponse): @property def stepfunction_backend(self) -> StepFunctionBackend: - if default_user_config.get("stepfunctions", {}).get("execute_state_machine", False): + if default_user_config.get("stepfunctions", {}).get( + "execute_state_machine", False + ): from .parser.models import stepfunctions_parser_backends return stepfunctions_parser_backends[self.current_account][self.region] diff --git a/moto/swf/models/__init__.py b/moto/swf/models/__init__.py index 6e13dcc31..4efdc53e1 100644 --- a/moto/swf/models/__init__.py +++ b/moto/swf/models/__init__.py @@ -196,7 +196,9 @@ class SWFBackend(BaseBackend): **kwargs: Any, ) -> WorkflowExecution: domain = self._get_domain(domain_name) - wf_type: WorkflowType = domain.get_type("workflow", workflow_name, workflow_version) # type: ignore + wf_type: WorkflowType = domain.get_type( + "workflow", workflow_name, workflow_version + ) # type: ignore if wf_type.status == "DEPRECATED": raise SWFTypeDeprecatedFault(wf_type) wfe = WorkflowExecution( diff --git a/moto/swf/models/domain.py b/moto/swf/models/domain.py index c188789ee..a78935430 100644 --- a/moto/swf/models/domain.py +++ b/moto/swf/models/domain.py @@ -50,9 +50,9 @@ class Domain(BaseModel): hsh = {"name": self.name, "status": self.status} if self.description: hsh["description"] = self.description - hsh[ - "arn" - ] = f"arn:aws:swf:{self.region_name}:{self.account_id}:/domain/{self.name}" + hsh["arn"] = ( + f"arn:aws:swf:{self.region_name}:{self.account_id}:/domain/{self.name}" + ) return hsh def to_full_dict(self) -> Dict[str, Any]: @@ -61,7 +61,9 @@ class Domain(BaseModel): "configuration": {"workflowExecutionRetentionPeriodInDays": self.retention}, } - def get_type(self, kind: str, name: str, version: str, ignore_empty: bool = False) -> "GenericType": # type: ignore + def get_type( + self, kind: str, name: 
str, version: str, ignore_empty: bool = False + ) -> "GenericType": # type: ignore try: return self.types[kind][name][version] except KeyError: diff --git a/moto/swf/models/workflow_execution.py b/moto/swf/models/workflow_execution.py index 5918fb749..5b6aa0d49 100644 --- a/moto/swf/models/workflow_execution.py +++ b/moto/swf/models/workflow_execution.py @@ -25,7 +25,6 @@ from .workflow_type import WorkflowType # TODO: extract decision related logic into a Decision class class WorkflowExecution(BaseModel): - # NB: the list is ordered exactly as in SWF validation exceptions so we can # mimic error messages closely ; don't reorder it without checking SWF. KNOWN_DECISION_TYPES = [ diff --git a/moto/textract/exceptions.py b/moto/textract/exceptions.py index 3391df15e..bb5f07a15 100644 --- a/moto/textract/exceptions.py +++ b/moto/textract/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the textract service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/textract/responses.py b/moto/textract/responses.py index 421f188de..adc95b0ff 100644 --- a/moto/textract/responses.py +++ b/moto/textract/responses.py @@ -1,4 +1,5 @@ """Handles incoming textract requests, invokes methods, returns responses.""" + import json from moto.core.responses import BaseResponse diff --git a/moto/textract/urls.py b/moto/textract/urls.py index d5b9cf74f..001109403 100644 --- a/moto/textract/urls.py +++ b/moto/textract/urls.py @@ -1,4 +1,5 @@ """textract base URL and path.""" + from .responses import TextractResponse url_bases = [ diff --git a/moto/timestreamwrite/exceptions.py b/moto/timestreamwrite/exceptions.py index 18efc08d5..35d901485 100644 --- a/moto/timestreamwrite/exceptions.py +++ b/moto/timestreamwrite/exceptions.py @@ -1,4 +1,5 @@ """Exceptions raised by the timestreamwrite service.""" + from moto.core.exceptions import JsonRESTError diff --git a/moto/transcribe/models.py b/moto/transcribe/models.py index 6d40ace83..fb81d894f 100644 --- a/moto/transcribe/models.py +++ b/moto/transcribe/models.py @@ -559,7 +559,6 @@ class TranscribeBackend(BaseBackend): specialty: str, type_: str, ) -> Dict[str, Any]: - if medical_transcription_job_name in self.medical_transcriptions: raise ConflictException( message="The requested job name already exists. Use a different job name." @@ -586,9 +585,9 @@ class TranscribeBackend(BaseBackend): job_type=type_, ) - self.medical_transcriptions[ - medical_transcription_job_name - ] = transcription_job_object + self.medical_transcriptions[medical_transcription_job_name] = ( + transcription_job_object + ) return transcription_job_object.response_object("CREATE") diff --git a/moto/utilities/aws_headers.py b/moto/utilities/aws_headers.py index 15856a4cb..110bc9055 100644 --- a/moto/utilities/aws_headers.py +++ b/moto/utilities/aws_headers.py @@ -12,8 +12,7 @@ TypeDec = TypeVar("TypeDec", bound=Callable[..., Any]) class GenericFunction(Protocol): - def __call__(self, *args: Any, **kwargs: Any) -> Any: - ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def gen_amz_crc32(response: Any, headerdict: Optional[Dict[str, Any]] = None) -> int: diff --git a/moto/utilities/distutils_version.py b/moto/utilities/distutils_version.py index a1c90766c..a9e20e473 100644 --- a/moto/utilities/distutils_version.py +++ b/moto/utilities/distutils_version.py @@ -161,7 +161,6 @@ class Version: class LooseVersion(Version): - """Version numbering for anarchists and software realists. Implements the standard interface for version number classes as described above. 
A version number consists of a series of numbers, diff --git a/moto/utilities/paginator.py b/moto/utilities/paginator.py index 078b51008..54f80ba46 100644 --- a/moto/utilities/paginator.py +++ b/moto/utilities/paginator.py @@ -21,13 +21,12 @@ T = TypeVar("T") class GenericFunction(Protocol): def __call__( self, func: "Callable[P1, List[T]]" - ) -> "Callable[P2, Tuple[List[T], Optional[str]]]": - ... + ) -> "Callable[P2, Tuple[List[T], Optional[str]]]": ... def paginate(pagination_model: Dict[str, Any]) -> GenericFunction: def pagination_decorator( - func: Callable[..., List[T]] + func: Callable[..., List[T]], ) -> Callable[..., Tuple[List[T], Optional[str]]]: @wraps(func) def pagination_wrapper(*args: Any, **kwargs: Any) -> Any: # type: ignore diff --git a/moto/utilities/tagging_service.py b/moto/utilities/tagging_service.py index 66d934084..e3da14efc 100644 --- a/moto/utilities/tagging_service.py +++ b/moto/utilities/tagging_service.py @@ -1,4 +1,5 @@ """Tag functionality contained in class TaggingService.""" + import re from typing import Dict, List, Optional @@ -172,7 +173,7 @@ class TaggingService: @staticmethod def convert_dict_to_tags_input( - tags: Optional[Dict[str, str]] + tags: Optional[Dict[str, str]], ) -> List[Dict[str, str]]: """Given a dictionary, return generic boto params for tags""" if not tags: diff --git a/moto/xray/models.py b/moto/xray/models.py index 194e3b001..46a5f7c07 100644 --- a/moto/xray/models.py +++ b/moto/xray/models.py @@ -54,7 +54,7 @@ class TraceSegment(BaseModel): metadata: Any = None, annotations: Any = None, subsegments: Any = None, - **kwargs: Any + **kwargs: Any, ): self.name = name self.id = segment_id diff --git a/requirements-dev.txt b/requirements-dev.txt index 96261eac6..5b9f5945d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -e .[all,server] -r requirements-tests.txt -black==22.3.0 +ruff==0.3.3 click inflection lxml diff --git a/requirements-tests.txt b/requirements-tests.txt index a84560db4..a86f90436 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,4 +5,3 @@ pytest-order pytest-xdist freezegun pylint -ruff diff --git a/scripts/update_backend_index.py b/scripts/update_backend_index.py index c956d61db..53aabe43f 100755 --- a/scripts/update_backend_index.py +++ b/scripts/update_backend_index.py @@ -5,8 +5,8 @@ import os import re from moto.backends import list_of_moto_modules from pathlib import Path +import subprocess -import black import pprint output_file = "moto/backend_index.py" @@ -75,13 +75,8 @@ def main(): pprint.pprint(index, fd) fd.write(os.linesep) - print("format with black") - black.format_file_in_place( - Path(output_path), - fast=False, - mode=black.FileMode(), - write_back=black.WriteBack.YES, - ) + print("format with ruff") + subprocess.run(["ruff", "format", Path(output_path)]) if __name__ == "__main__": diff --git a/tests/test_apigatewayv2/test_apigatewayv2.py b/tests/test_apigatewayv2/test_apigatewayv2.py index 0726c5f63..4afcb0fc3 100644 --- a/tests/test_apigatewayv2/test_apigatewayv2.py +++ b/tests/test_apigatewayv2/test_apigatewayv2.py @@ -1,4 +1,5 @@ """Unit tests for apigatewayv2-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_autoscaling/test_autoscaling_cloudformation.py b/tests/test_autoscaling/test_autoscaling_cloudformation.py index 66694f4a3..ae24104db 100644 --- a/tests/test_autoscaling/test_autoscaling_cloudformation.py +++ b/tests/test_autoscaling/test_autoscaling_cloudformation.py @@ -26,9 
+26,7 @@ Resources: Outputs: LaunchConfigurationName: Value: !Ref LaunchConfiguration -""".strip().format( - EXAMPLE_AMI_ID - ) +""".strip().format(EXAMPLE_AMI_ID) cf_client.create_stack(StackName=stack_name, TemplateBody=cf_template) stack = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0] @@ -50,9 +48,7 @@ Resources: Outputs: LaunchConfigurationName: Value: !Ref LaunchConfiguration -""".strip().format( - EXAMPLE_AMI_ID - ) +""".strip().format(EXAMPLE_AMI_ID) cf_client.update_stack(StackName=stack_name, TemplateBody=cf_template) stack = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0] diff --git a/tests/test_awslambda/test_lambda_alias.py b/tests/test_awslambda/test_lambda_alias.py index 7c12df8f5..a43d5ce80 100644 --- a/tests/test_awslambda/test_lambda_alias.py +++ b/tests/test_awslambda/test_lambda_alias.py @@ -1,4 +1,5 @@ """Unit tests for lambda-supported APIs.""" + from uuid import uuid4 import boto3 diff --git a/tests/test_backup/test_backup.py b/tests/test_backup/test_backup.py index e5ff7ed19..54eb0c8a3 100644 --- a/tests/test_backup/test_backup.py +++ b/tests/test_backup/test_backup.py @@ -1,4 +1,5 @@ """Unit tests for backup-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_cloudformation/test_cloudformation_stack_integration.py b/tests/test_cloudformation/test_cloudformation_stack_integration.py index 3be5d3083..3b067d6dd 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_integration.py +++ b/tests/test_cloudformation/test_cloudformation_stack_integration.py @@ -682,9 +682,9 @@ def test_update_stack_listener_and_rule(): elbv2_conn = boto3.client("elbv2", "us-west-1") - initial_template["Resources"]["rule"]["Properties"]["Conditions"][0][ - "Field" - ] = "host-header" + initial_template["Resources"]["rule"]["Properties"]["Conditions"][0]["Field"] = ( + "host-header" + ) initial_template["Resources"]["rule"]["Properties"]["Conditions"][0]["Values"] = "*" initial_template["Resources"]["listener"]["Properties"]["Port"] = 90 @@ -1710,9 +1710,9 @@ def test_ssm_parameter_update_stack(): assert parameters[0]["Name"] == "test_ssm" assert parameters[0]["Value"] == "Test SSM Parameter" - parameter_template["Resources"]["BasicParameter"]["Properties"][ - "Value" - ] = "Test SSM Parameter Updated" + parameter_template["Resources"]["BasicParameter"]["Properties"]["Value"] = ( + "Test SSM Parameter Updated" + ) cfn.update_stack(StackName=stack_name, TemplateBody=json.dumps(parameter_template)) ssm_client = boto3.client("ssm", region_name="us-west-2") diff --git a/tests/test_cloudtrail/test_server.py b/tests/test_cloudtrail/test_server.py index 5cb1b6dd7..a0e20aa2e 100644 --- a/tests/test_cloudtrail/test_server.py +++ b/tests/test_cloudtrail/test_server.py @@ -1,4 +1,5 @@ """Test different server responses.""" + import json import moto.server as server diff --git a/tests/test_cognitoidentity/test_server.py b/tests/test_cognitoidentity/test_server.py index 07742274e..57d0fcb60 100644 --- a/tests/test_cognitoidentity/test_server.py +++ b/tests/test_cognitoidentity/test_server.py @@ -10,7 +10,6 @@ Test the different server responses @mock_aws def test_create_identity_pool(): - backend = server.create_backend_app("cognito-identity") test_client = backend.test_client() diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py index ee4691d99..567727b8b 100644 --- a/tests/test_cognitoidp/test_cognitoidp.py +++ b/tests/test_cognitoidp/test_cognitoidp.py @@ 
-3679,7 +3679,6 @@ def test_create_resource_server_with_no_scopes(): @mock_aws def test_describe_resource_server(): - # Create a user pool to attach a resource server to client = boto3.client("cognito-idp", "us-west-2") name = str(uuid.uuid4()) @@ -3736,7 +3735,6 @@ def test_describe_resource_server(): @mock_aws def test_list_resource_servers_empty_set(): - # Create a user pool to attach a resource server to client = boto3.client("cognito-idp", "us-west-2") name = str(uuid.uuid4()) @@ -3762,7 +3760,6 @@ def test_list_resource_servers_empty_set(): @mock_aws def test_list_resource_servers_single_page(): - # Create a user pool to attach a resource server to client = boto3.client("cognito-idp", "us-west-2") name = str(uuid.uuid4()) @@ -3818,7 +3815,6 @@ def test_list_resource_servers_single_page(): @mock_aws def test_list_resource_servers_multi_page(): - # Create a user pool to attach a resource server to client = boto3.client("cognito-idp", "us-west-2") name = str(uuid.uuid4()) diff --git a/tests/test_config/test_config_rules.py b/tests/test_config/test_config_rules.py index b91d7f23a..513e1ebaa 100644 --- a/tests/test_config/test_config_rules.py +++ b/tests/test_config/test_config_rules.py @@ -1,10 +1,11 @@ """Unit tests specific to the ConfigService ConfigRule APIs. - These APIs include: - put_config_rule - describe_config_rule - delete_config_rule +These APIs include: + put_config_rule + describe_config_rule + delete_config_rule """ + import json from string import ascii_lowercase diff --git a/tests/test_config/test_config_rules_integration.py b/tests/test_config/test_config_rules_integration.py index bccb05fa0..286ea7fee 100644 --- a/tests/test_config/test_config_rules_integration.py +++ b/tests/test_config/test_config_rules_integration.py @@ -143,9 +143,9 @@ def test_config_rules_source_details_errors(): ) custom_rule = custom_config_rule() - custom_rule["Source"]["SourceDetails"][0][ - "MessageType" - ] = "ConfigurationItemChangeNotification" + custom_rule["Source"]["SourceDetails"][0]["MessageType"] = ( + "ConfigurationItemChangeNotification" + ) with pytest.raises(ClientError) as exc: client.put_config_rule(ConfigRule=custom_rule) err = exc.value.response["Error"] diff --git a/tests/test_config/test_config_tags.py b/tests/test_config/test_config_tags.py index 6320b5086..a6756cbb5 100644 --- a/tests/test_config/test_config_tags.py +++ b/tests/test_config/test_config_tags.py @@ -1,11 +1,12 @@ """Unit tests specific to the tag-related ConfigService APIs. 
- These APIs include: - list_tags_for_resource - tag_resource - untag_resource +These APIs include: + list_tags_for_resource + tag_resource + untag_resource """ + import boto3 import pytest from botocore.exceptions import ClientError, ParamValidationError diff --git a/tests/test_core/test_decorator_calls.py b/tests/test_core/test_decorator_calls.py index 327c3fa13..58bc2c22c 100644 --- a/tests/test_core/test_decorator_calls.py +++ b/tests/test_core/test_decorator_calls.py @@ -50,7 +50,6 @@ def test_context_manager(aws_credentials: Any) -> None: # type: ignore[misc] # @mock.patch.dict(os.environ, {"MOTO_CALL_RESET_API": "false"}) @pytest.mark.parametrize("mock_class", [mock_aws, ServerModeMockAWS, ProxyModeMockAWS]) def test_context_decorator_exposes_bare_essentials(mock_class: Any) -> None: # type: ignore - # Verify we're only exposing the necessary methods with mock_class() as m: exposed_attributes = [a for a in m.__dict__.keys() if not a.startswith("_")] @@ -85,7 +84,10 @@ def test_decorater_wrapped_gets_set() -> None: """ Moto decorator's __wrapped__ should get set to the tests function """ - assert test_decorater_wrapped_gets_set.__wrapped__.__name__ == "test_decorater_wrapped_gets_set" # type: ignore + assert ( + test_decorater_wrapped_gets_set.__wrapped__.__name__ + == "test_decorater_wrapped_gets_set" + ) # type: ignore @mock_aws diff --git a/tests/test_core/test_ec2_vpc_endpoint_services.py b/tests/test_core/test_ec2_vpc_endpoint_services.py index 9b0ce6515..00936092e 100644 --- a/tests/test_core/test_ec2_vpc_endpoint_services.py +++ b/tests/test_core/test_ec2_vpc_endpoint_services.py @@ -75,8 +75,9 @@ def test_describe_vpc_default_endpoint_services() -> None: assert all_names[1] == partial_services["ServiceNames"][1] assert all_names[0] == partial_services["ServiceDetails"][0]["ServiceName"] assert all_names[1] == partial_services["ServiceDetails"][1]["ServiceName"] - assert partial_services["NextToken"] == ( - all_services["ServiceDetails"][2]["ServiceId"] + assert ( + partial_services["NextToken"] + == (all_services["ServiceDetails"][2]["ServiceId"]) ) # Use the next token to receive another service. diff --git a/tests/test_core/test_request_passthrough.py b/tests/test_core/test_request_passthrough.py index a563a9856..872c99ff4 100644 --- a/tests/test_core/test_request_passthrough.py +++ b/tests/test_core/test_request_passthrough.py @@ -99,7 +99,6 @@ def test_passthrough_calls_for_wildcard_urls() -> None: with patch.dict( os.environ, {"AWS_ACCESS_KEY_ID": "a", "AWS_SECRET_ACCESS_KEY": "b"} ): - # All requests to these URL's are passed through with mock_aws( config={ diff --git a/tests/test_dax/test_dax.py b/tests/test_dax/test_dax.py index 688a1a257..518ff59b6 100644 --- a/tests/test_dax/test_dax.py +++ b/tests/test_dax/test_dax.py @@ -1,4 +1,5 @@ """Unit tests for dax-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_ds/test_ds.py b/tests/test_ds/test_ds.py index 7d6a94e2b..d6f6988ac 100644 --- a/tests/test_ds/test_ds.py +++ b/tests/test_ds/test_ds.py @@ -3,6 +3,7 @@ Simple AD directories are used for test data, but the operations are common to the other directory types. 
""" + from datetime import datetime, timezone import boto3 diff --git a/tests/test_ds/test_ds_ad_connect.py b/tests/test_ds/test_ds_ad_connect.py index 0d3f22ddb..e95bc4c41 100644 --- a/tests/test_ds/test_ds_ad_connect.py +++ b/tests/test_ds/test_ds_ad_connect.py @@ -3,6 +3,7 @@ The logic to check the details of VPCs and Subnets is shared between the "create directory" APIs, so it will not be repeated here. """ + from datetime import datetime, timezone import boto3 diff --git a/tests/test_ds/test_ds_microsoft_ad.py b/tests/test_ds/test_ds_microsoft_ad.py index dc59cc6cb..630b1e7ea 100644 --- a/tests/test_ds/test_ds_microsoft_ad.py +++ b/tests/test_ds/test_ds_microsoft_ad.py @@ -3,6 +3,7 @@ The logic to check the details of VPCs and Subnets is shared between the "create directory" APIs, so it will not be repeated here. """ + from datetime import datetime, timezone import boto3 diff --git a/tests/test_ds/test_ds_tags.py b/tests/test_ds/test_ds_tags.py index f8da4bfff..e47e9202d 100644 --- a/tests/test_ds/test_ds_tags.py +++ b/tests/test_ds/test_ds_tags.py @@ -3,6 +3,7 @@ Simple AD directories are used for test data, but the operations are common to the other directory types. """ + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_ec2/test_fleets.py b/tests/test_ec2/test_fleets.py index be04fbf3c..e2db4ceba 100644 --- a/tests/test_ec2/test_fleets.py +++ b/tests/test_ec2/test_fleets.py @@ -385,7 +385,6 @@ def test_create_fleet_using_launch_template_config__overrides(): @ec2_aws_verified def test_delete_fleet(): with launch_template_context() as ctxt: - fleet_res = ctxt.ec2.create_fleet( LaunchTemplateConfigs=[ { diff --git a/tests/test_ec2/test_security_groups_cloudformation.py b/tests/test_ec2/test_security_groups_cloudformation.py index 8f935db85..9a41d9d5f 100644 --- a/tests/test_ec2/test_security_groups_cloudformation.py +++ b/tests/test_ec2/test_security_groups_cloudformation.py @@ -225,9 +225,9 @@ def test_stack_security_groups(): first_desc = str(uuid4()) second_desc = str(uuid4()) our_template = SEC_GROUP_SOURCE.copy() - our_template["Resources"]["my-security-group"]["Properties"][ - "GroupDescription" - ] = second_desc + our_template["Resources"]["my-security-group"]["Properties"]["GroupDescription"] = ( + second_desc + ) our_template["Resources"]["InstanceSecurityGroup"]["Properties"][ "GroupDescription" ] = first_desc diff --git a/tests/test_ec2/test_server.py b/tests/test_ec2/test_server.py index 3010ec597..02bb9d500 100644 --- a/tests/test_ec2/test_server.py +++ b/tests/test_ec2/test_server.py @@ -1,4 +1,5 @@ """Test the different server responses.""" + import re import xmltodict diff --git a/tests/test_ecr/test_ecr_cloudformation.py b/tests/test_ecr/test_ecr_cloudformation.py index d23b6ee44..4f6ede1d0 100644 --- a/tests/test_ecr/test_ecr_cloudformation.py +++ b/tests/test_ecr/test_ecr_cloudformation.py @@ -61,9 +61,9 @@ def test_update_repository(): cfn_client.create_stack(StackName=stack_name, TemplateBody=template) template_update = copy.deepcopy(json.loads(template)) - template_update["Resources"]["Repo"]["Properties"][ - "ImageTagMutability" - ] = "IMMUTABLE" + template_update["Resources"]["Repo"]["Properties"]["ImageTagMutability"] = ( + "IMMUTABLE" + ) # when cfn_client.update_stack( diff --git a/tests/test_ecs/test_ecs_cloudformation.py b/tests/test_ecs/test_ecs_cloudformation.py index 88a771261..292bb3d60 100644 --- a/tests/test_ecs/test_ecs_cloudformation.py +++ b/tests/test_ecs/test_ecs_cloudformation.py @@ -36,9 +36,9 @@ 
def test_update_task_definition_family_through_cloudformation_should_trigger_a_r cfn_conn.create_stack(StackName="test_stack", TemplateBody=template1_json) template2 = deepcopy(template1) - template2["Resources"]["testTaskDefinition"]["Properties"][ - "Family" - ] = "testTaskDefinition2" + template2["Resources"]["testTaskDefinition"]["Properties"]["Family"] = ( + "testTaskDefinition2" + ) template2_json = json.dumps(template2) cfn_conn.update_stack(StackName="test_stack", TemplateBody=template2_json) diff --git a/tests/test_efs/test_mount_target.py b/tests/test_efs/test_mount_target.py index 604c6ace8..a5fdbbc33 100644 --- a/tests/test_efs/test_mount_target.py +++ b/tests/test_efs/test_mount_target.py @@ -206,9 +206,7 @@ def test_create_mount_target_too_many_security_groups(efs, ec2, file_system, sub assert "SecurityGroupLimitExceeded" == resp["Error"]["Code"] -def test_delete_file_system_mount_targets_attached( - efs, ec2, file_system, subnet -): # pylint: disable=unused-argument +def test_delete_file_system_mount_targets_attached(efs, ec2, file_system, subnet): # pylint: disable=unused-argument efs.create_mount_target( FileSystemId=file_system["FileSystemId"], SubnetId=subnet["SubnetId"] ) @@ -219,9 +217,7 @@ def test_delete_file_system_mount_targets_attached( assert "FileSystemInUse" == resp["Error"]["Code"] -def test_describe_mount_targets_minimal_case( - efs, ec2, file_system, subnet -): # pylint: disable=unused-argument +def test_describe_mount_targets_minimal_case(efs, ec2, file_system, subnet): # pylint: disable=unused-argument create_resp = efs.create_mount_target( FileSystemId=file_system["FileSystemId"], SubnetId=subnet["SubnetId"] ) @@ -242,9 +238,7 @@ def test_describe_mount_targets_minimal_case( assert mount_target == create_resp -def test_describe_mount_targets__by_access_point_id( - efs, ec2, file_system, subnet -): # pylint: disable=unused-argument +def test_describe_mount_targets__by_access_point_id(efs, ec2, file_system, subnet): # pylint: disable=unused-argument create_resp = efs.create_mount_target( FileSystemId=file_system["FileSystemId"], SubnetId=subnet["SubnetId"] ) diff --git a/tests/test_eks/test_eks.py b/tests/test_eks/test_eks.py index 56d814673..615eedfd6 100644 --- a/tests/test_eks/test_eks.py +++ b/tests/test_eks/test_eks.py @@ -297,7 +297,7 @@ def test_create_cluster_throws_exception_when_cluster_exists(ClusterBuilder): with pytest.raises(ClientError) as raised_exception: client.create_cluster( name=generated_test_data.existing_cluster_name, - **dict(ClusterInputs.REQUIRED) + **dict(ClusterInputs.REQUIRED), ) count_clusters_after_test = len(client.list_clusters()[ResponseAttributes.CLUSTERS]) @@ -522,7 +522,7 @@ def test_create_nodegroup_throws_exception_when_cluster_not_found(): client.create_nodegroup( clusterName=non_existent_cluster_name, nodegroupName=mock_random.get_random_string(), - **dict(NodegroupInputs.REQUIRED) + **dict(NodegroupInputs.REQUIRED), ) assert_expected_exception(raised_exception, expected_exception, expected_msg) @@ -543,7 +543,7 @@ def test_create_nodegroup_throws_exception_when_nodegroup_already_exists( client.create_nodegroup( clusterName=generated_test_data.cluster_name, nodegroupName=generated_test_data.existing_nodegroup_name, - **dict(NodegroupInputs.REQUIRED) + **dict(NodegroupInputs.REQUIRED), ) count_nodegroups_after_test = len( client.list_nodegroups(clusterName=generated_test_data.cluster_name)[ @@ -570,7 +570,7 @@ def test_create_nodegroup_throws_exception_when_cluster_not_active(NodegroupBuil client.create_nodegroup( 
clusterName=generated_test_data.cluster_name, nodegroupName=mock_random.get_random_string(), - **dict(NodegroupInputs.REQUIRED) + **dict(NodegroupInputs.REQUIRED), ) count_nodegroups_after_test = len( client.list_nodegroups(clusterName=generated_test_data.cluster_name)[ @@ -972,7 +972,7 @@ def test_create_fargate_profile_throws_exception_when_cluster_not_found(): client.create_fargate_profile( clusterName=non_existent_cluster_name, fargateProfileName=mock_random.get_random_string(), - **dict(FargateProfileInputs.REQUIRED) + **dict(FargateProfileInputs.REQUIRED), ) assert_expected_exception(raised_exception, expected_exception, expected_msg) @@ -990,7 +990,7 @@ def test_create_fargate_profile_throws_exception_when_fargate_profile_already_ex client.create_fargate_profile( clusterName=generated_test_data.cluster_name, fargateProfileName=generated_test_data.existing_fargate_profile_name, - **dict(FargateProfileInputs.REQUIRED) + **dict(FargateProfileInputs.REQUIRED), ) count_profiles_after_test = len( client.list_fargate_profiles(clusterName=generated_test_data.cluster_name)[ @@ -1019,7 +1019,7 @@ def test_create_fargate_profile_throws_exception_when_cluster_not_active( client.create_fargate_profile( clusterName=generated_test_data.cluster_name, fargateProfileName=mock_random.get_random_string(), - **dict(FargateProfileInputs.REQUIRED) + **dict(FargateProfileInputs.REQUIRED), ) count_fargate_profiles_after_test = len( client.list_fargate_profiles(clusterName=generated_test_data.cluster_name)[ diff --git a/tests/test_eks/test_eks_constants.py b/tests/test_eks/test_eks_constants.py index 1a0b5b9b3..873baa37c 100644 --- a/tests/test_eks/test_eks_constants.py +++ b/tests/test_eks/test_eks_constants.py @@ -1,6 +1,7 @@ """ This file should only contain constants used for the EKS tests. """ + import re from enum import Enum diff --git a/tests/test_elbv2/test_elbv2_listener_rules.py b/tests/test_elbv2/test_elbv2_listener_rules.py index 654c055b5..d9948ec13 100644 --- a/tests/test_elbv2/test_elbv2_listener_rules.py +++ b/tests/test_elbv2/test_elbv2_listener_rules.py @@ -55,7 +55,6 @@ def setup_listener(conn): def setup_target_group(boto_client): - ec2 = boto3.resource("ec2", region_name="us-east-1") vpc = ec2.create_vpc(CidrBlock="172.28.7.0/24", InstanceTenancy="default") diff --git a/tests/test_emr/test_emr_boto3.py b/tests/test_emr/test_emr_boto3.py index 8bd73940c..6a4dbdfb5 100644 --- a/tests/test_emr/test_emr_boto3.py +++ b/tests/test_emr/test_emr_boto3.py @@ -1091,7 +1091,6 @@ def test_tags(): @mock_aws def test_security_configurations(): - client = boto3.client("emr", region_name="us-east-1") security_configuration_name = "MySecurityConfiguration" diff --git a/tests/test_emr/test_emr_integration.py b/tests/test_emr/test_emr_integration.py index 6805ccc7d..db34c30da 100644 --- a/tests/test_emr/test_emr_integration.py +++ b/tests/test_emr/test_emr_integration.py @@ -87,7 +87,6 @@ def test_default_emr_security_groups_get_created_on_first_job_flow(): settings.TEST_SERVER_MODE, reason="Can't modify backend directly in server mode." 
) class TestEmrSecurityGroupManager: - mocks = [] def setup_method(self): diff --git a/tests/test_emrcontainers/test_emrcontainers.py b/tests/test_emrcontainers/test_emrcontainers.py index 3652c28f1..aeab18a35 100644 --- a/tests/test_emrcontainers/test_emrcontainers.py +++ b/tests/test_emrcontainers/test_emrcontainers.py @@ -204,9 +204,7 @@ class TestListVirtualClusters: tomorrow = today + timedelta(days=1) @pytest.fixture(autouse=True) - def _setup_environment( - self, client, virtual_cluster_factory - ): # pylint: disable=unused-argument + def _setup_environment(self, client, virtual_cluster_factory): # pylint: disable=unused-argument self.client = client @pytest.mark.parametrize( @@ -435,9 +433,7 @@ class TestListJobRuns: tomorrow = today + timedelta(days=1) @pytest.fixture(autouse=True) - def _setup_environment( - self, client, virtual_cluster_factory, job_factory - ): # pylint: disable=unused-argument + def _setup_environment(self, client, virtual_cluster_factory, job_factory): # pylint: disable=unused-argument self.client = client self.virtual_cluster_id = virtual_cluster_factory[0] diff --git a/tests/test_emrcontainers/test_server.py b/tests/test_emrcontainers/test_server.py index 40b301a1a..cbf7a8ed1 100644 --- a/tests/test_emrcontainers/test_server.py +++ b/tests/test_emrcontainers/test_server.py @@ -1,4 +1,5 @@ """Test the different server responses.""" + import json import moto.server as server diff --git a/tests/test_emrserverless/test_emrserverless.py b/tests/test_emrserverless/test_emrserverless.py index 3c32ed540..c32f5b6d3 100644 --- a/tests/test_emrserverless/test_emrserverless.py +++ b/tests/test_emrserverless/test_emrserverless.py @@ -1,4 +1,5 @@ """Unit tests for emrserverless-supported APIs.""" + import re from contextlib import contextmanager from datetime import datetime, timezone diff --git a/tests/test_emrserverless/test_server.py b/tests/test_emrserverless/test_server.py index e2ba66d6b..de744d2ce 100644 --- a/tests/test_emrserverless/test_server.py +++ b/tests/test_emrserverless/test_server.py @@ -1,4 +1,5 @@ """Test different server responses.""" + import moto.server as server diff --git a/tests/test_es/test_es.py b/tests/test_es/test_es.py index b751a2b10..3ee124388 100644 --- a/tests/test_es/test_es.py +++ b/tests/test_es/test_es.py @@ -1,4 +1,5 @@ """Unit tests for es-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_events/test_events_cloudformation.py b/tests/test_events/test_events_cloudformation.py index a6d1a5c0a..f9f808ea1 100644 --- a/tests/test_events/test_events_cloudformation.py +++ b/tests/test_events/test_events_cloudformation.py @@ -102,9 +102,9 @@ def test_update_archive(): cfn_client.create_stack(StackName=stack_name, TemplateBody=template) template_update = copy.deepcopy(json.loads(template)) - template_update["Resources"]["Archive"]["Properties"][ - "Description" - ] = "test archive" + template_update["Resources"]["Archive"]["Properties"]["Description"] = ( + "test archive" + ) # when cfn_client.update_stack( diff --git a/tests/test_events/test_events_lambdatriggers_integration.py b/tests/test_events/test_events_lambdatriggers_integration.py index 422df6b49..efe56b970 100644 --- a/tests/test_events/test_events_lambdatriggers_integration.py +++ b/tests/test_events/test_events_lambdatriggers_integration.py @@ -73,9 +73,9 @@ def test_creating_bucket__invokes_lambda(): log_group = f"/aws/lambda/{bucket_name}" msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group, 
wait_time=5) - assert ( - msg_showed_up - ), "Lambda was not invoked after creating an S3 bucket. All logs: " + str(all_logs) + assert msg_showed_up, ( + "Lambda was not invoked after creating an S3 bucket. All logs: " + str(all_logs) + ) event = json.loads(list([line for line in all_logs if expected_msg in line])[-1]) diff --git a/tests/test_firehose/test_firehose_put.py b/tests/test_firehose/test_firehose_put.py index 58140f301..7a8170ae1 100644 --- a/tests/test_firehose/test_firehose_put.py +++ b/tests/test_firehose/test_firehose_put.py @@ -1,4 +1,5 @@ """Unit tests verifying put-related delivery stream APIs.""" + import boto3 from moto import mock_aws diff --git a/tests/test_firehose/test_firehose_tags.py b/tests/test_firehose/test_firehose_tags.py index daef6eea9..67e563a04 100644 --- a/tests/test_firehose/test_firehose_tags.py +++ b/tests/test_firehose/test_firehose_tags.py @@ -1,4 +1,5 @@ """Unit tests verifying tag-related delivery stream APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_glue/helpers.py b/tests/test_glue/helpers.py index 0ef161601..a0fb630e0 100644 --- a/tests/test_glue/helpers.py +++ b/tests/test_glue/helpers.py @@ -42,9 +42,9 @@ def create_table_input(database_name, table_name, columns=None, partition_keys=N table_input["Name"] = table_name table_input["PartitionKeys"] = partition_keys or [] table_input["StorageDescriptor"]["Columns"] = columns or [] - table_input["StorageDescriptor"][ - "Location" - ] = f"s3://my-bucket/{database_name}/{table_name}" + table_input["StorageDescriptor"]["Location"] = ( + f"s3://my-bucket/{database_name}/{table_name}" + ) return table_input diff --git a/tests/test_glue/test_glue.py b/tests/test_glue/test_glue.py index 2a767a07a..5b82638d7 100644 --- a/tests/test_glue/test_glue.py +++ b/tests/test_glue/test_glue.py @@ -1,4 +1,5 @@ """Unit tests for glue-supported APIs.""" + from random import randint from uuid import uuid4 diff --git a/tests/test_glue/test_schema_registry.py b/tests/test_glue/test_schema_registry.py index 1475b8727..c348a50a9 100644 --- a/tests/test_glue/test_schema_registry.py +++ b/tests/test_glue/test_schema_registry.py @@ -745,7 +745,6 @@ def test_get_schema_version_valid_input_version_number_latest_version(client): def test_get_schema_version_empty_input(client): - with pytest.raises(ClientError) as exc: client.get_schema_version() @@ -760,7 +759,6 @@ def test_get_schema_version_empty_input(client): def test_get_schema_version_invalid_schema_id_schema_version_number_both_provided( client, ): - with pytest.raises(ClientError) as exc: client.get_schema_version( SchemaId=TEST_SCHEMA_ID, diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index 558441dc8..7dbf5431a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -170,7 +170,6 @@ def test_create_instance_profile_should_throw_when_name_is_not_unique(): @mock_aws def test_create_add_additional_roles_to_instance_profile_error(): - # Setup iam = boto3.client("iam", region_name="us-east-1") name = "test_profile" @@ -3931,10 +3930,13 @@ def test_role_config_client(): # Test non-aggregated pagination assert ( - config_client.list_discovered_resources( - resourceType="AWS::IAM::Role", limit=1, nextToken=result["nextToken"] - )["resourceIdentifiers"][0]["resourceId"] - ) != first_result + ( + config_client.list_discovered_resources( + resourceType="AWS::IAM::Role", limit=1, nextToken=result["nextToken"] + )["resourceIdentifiers"][0]["resourceId"] + ) + != first_result + ) # 
Test aggregated query - by `Limit=len(CONFIG_REGIONS)`, we should get a single policy duplicated across all regions agg_result = config_client.list_aggregate_discovered_resources( @@ -4381,10 +4383,13 @@ def test_policy_config_client(): # Test non-aggregated pagination assert ( - config_client.list_discovered_resources( - resourceType="AWS::IAM::Policy", limit=1, nextToken=result["nextToken"] - )["resourceIdentifiers"][0]["resourceId"] - ) != first_result + ( + config_client.list_discovered_resources( + resourceType="AWS::IAM::Policy", limit=1, nextToken=result["nextToken"] + )["resourceIdentifiers"][0]["resourceId"] + ) + != first_result + ) # Test aggregated query - by `Limit=len(CONFIG_REGIONS)`, we should get a single policy duplicated across all regions agg_result = config_client.list_aggregate_discovered_resources( diff --git a/tests/test_iam/test_iam_cloudformation.py b/tests/test_iam/test_iam_cloudformation.py index 7c7e3707d..68968870b 100644 --- a/tests/test_iam/test_iam_cloudformation.py +++ b/tests/test_iam/test_iam_cloudformation.py @@ -85,9 +85,7 @@ Resources: Type: AWS::IAM::User Properties: UserName: {0} -""".strip().format( - user_name - ) +""".strip().format(user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -127,9 +125,7 @@ Resources: Type: AWS::IAM::User Properties: Path: {0} -""".strip().format( - path - ) +""".strip().format(path) cf_client.update_stack(StackName=stack_name, TemplateBody=template) @@ -166,9 +162,7 @@ Resources: Type: AWS::IAM::User Properties: UserName: {0} -""".strip().format( - new_user_name - ) +""".strip().format(new_user_name) cf_client.update_stack(StackName=stack_name, TemplateBody=template) @@ -253,9 +247,7 @@ Resources: Type: AWS::IAM::User Properties: UserName: {} -""".strip().format( - user_name - ) +""".strip().format(user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -316,9 +308,7 @@ Outputs: Value: !Ref TheUser UserArn: Value: !GetAtt TheUser.Arn -""".strip().format( - user_name - ) +""".strip().format(user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) stack_description = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0] @@ -404,9 +394,7 @@ Resources: - Effect: Allow Action: s3:* Resource: '*' -""".strip().format( - desc, name - ) +""".strip().format(desc, name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -451,9 +439,7 @@ Resources: Resource: '*' Groups: - {1} -""".strip().format( - desc, group_name - ) +""".strip().format(desc, group_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -499,9 +485,7 @@ Resources: Resource: '*' Users: - {1} -""".strip().format( - desc, user_name - ) +""".strip().format(desc, user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -547,9 +531,7 @@ Resources: Resource: '*' Roles: - {1} -""".strip().format( - desc, role_name - ) +""".strip().format(desc, role_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -600,9 +582,7 @@ Resources: Resource: {1} Users: - {2} -""".strip().format( - policy_name, bucket_arn, user_name - ) +""".strip().format(policy_name, bucket_arn, user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -650,9 +630,7 @@ Resources: Resource: {1} Users: - {2} -""".strip().format( - policy_name, bucket_arn, user_name_1 - ) +""".strip().format(policy_name, bucket_arn, user_name_1) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -683,9 +661,7 
@@ Resources: Resource: {1} Users: - {2} -""".strip().format( - policy_name, bucket_arn, user_name_2 - ) +""".strip().format(policy_name, bucket_arn, user_name_2) cf_client.update_stack(StackName=stack_name, TemplateBody=template) @@ -734,9 +710,7 @@ Resources: Resource: {0} Users: - {1} -""".strip().format( - bucket_arn, user_name - ) +""".strip().format(bucket_arn, user_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -786,9 +760,7 @@ Resources: Resource: {1} Roles: - {2} -""".strip().format( - policy_name, bucket_arn, role_name - ) +""".strip().format(policy_name, bucket_arn, role_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -836,9 +808,7 @@ Resources: Resource: {1} Roles: - {2} -""".strip().format( - policy_name, bucket_arn, role_name_1 - ) +""".strip().format(policy_name, bucket_arn, role_name_1) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -869,9 +839,7 @@ Resources: Resource: {1} Roles: - {2} -""".strip().format( - policy_name, bucket_arn, role_name_2 - ) +""".strip().format(policy_name, bucket_arn, role_name_2) cf_client.update_stack(StackName=stack_name, TemplateBody=template) @@ -920,9 +888,7 @@ Resources: Resource: {0} Roles: - {1} -""".strip().format( - bucket_arn, role_name - ) +""".strip().format(bucket_arn, role_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -973,9 +939,7 @@ Resources: Resource: {1} Groups: - {2} -""".strip().format( - policy_name, bucket_arn, group_name - ) +""".strip().format(policy_name, bucket_arn, group_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -1023,9 +987,7 @@ Resources: Resource: {1} Groups: - {2} -""".strip().format( - policy_name, bucket_arn, group_name_1 - ) +""".strip().format(policy_name, bucket_arn, group_name_1) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -1056,9 +1018,7 @@ Resources: Resource: {1} Groups: - {2} -""".strip().format( - policy_name, bucket_arn, group_name_2 - ) +""".strip().format(policy_name, bucket_arn, group_name_2) cf_client.update_stack(StackName=stack_name, TemplateBody=template) @@ -1108,9 +1068,7 @@ Resources: Resource: {0} Groups: - {1} -""".strip().format( - bucket_arn, group_name - ) +""".strip().format(bucket_arn, group_name) cf_client.create_stack(StackName=stack_name, TemplateBody=template) @@ -1396,9 +1354,7 @@ Resources: Type: AWS::IAM::AccessKey Properties: UserName: {0} -""".strip().format( - other_user_name - ) +""".strip().format(other_user_name) cf_client.update_stack(StackName=stack_name, TemplateBody=template) diff --git a/tests/test_iot/test_iot_policies.py b/tests/test_iot/test_iot_policies.py index 1eb4ea5a8..e7772f4a9 100644 --- a/tests/test_iot/test_iot_policies.py +++ b/tests/test_iot/test_iot_policies.py @@ -60,7 +60,6 @@ def test_attach_policy_to_identity(region_name, iot_client, policy): def test_detach_policy(iot_client, policy): - cert = iot_client.create_keys_and_certificate(setAsActive=True) cert_arn = cert["certificateArn"] diff --git a/tests/test_kinesis/test_kinesis.py b/tests/test_kinesis/test_kinesis.py index b4e4755cf..e66c21dd2 100644 --- a/tests/test_kinesis/test_kinesis.py +++ b/tests/test_kinesis/test_kinesis.py @@ -142,7 +142,7 @@ def test_list_streams_stream_discription(): conn = boto3.client("kinesis", region_name="us-west-2") for i in range(3): - conn.create_stream(StreamName=f"stream{i}", ShardCount=i+1) + conn.create_stream(StreamName=f"stream{i}", ShardCount=i + 1) resp = conn.list_streams() assert 
len(resp["StreamSummaries"]) == 3 diff --git a/tests/test_kinesis/test_server.py b/tests/test_kinesis/test_server.py index 1ac1beb7d..17cdc334a 100644 --- a/tests/test_kinesis/test_server.py +++ b/tests/test_kinesis/test_server.py @@ -15,5 +15,5 @@ def test_list_streams(): assert json_data == { "HasMoreStreams": False, "StreamNames": [], - "StreamSummaries": [] + "StreamSummaries": [], } diff --git a/tests/test_kms/test_kms_grants.py b/tests/test_kms/test_kms_grants.py index 8d8012177..db8e881c0 100644 --- a/tests/test_kms/test_kms_grants.py +++ b/tests/test_kms/test_kms_grants.py @@ -120,7 +120,6 @@ def test_list_retirable_grants(): @mock_aws def test_revoke_grant(): - client = boto3.client("kms", region_name="us-east-1") key_id = create_key(client) @@ -156,7 +155,6 @@ def test_revoke_grant_raises_when_grant_does_not_exist(): @mock_aws def test_retire_grant_by_token(): - client = boto3.client("kms", region_name="us-east-1") key_id = create_key(client) @@ -175,7 +173,6 @@ def test_retire_grant_by_token(): @mock_aws def test_retire_grant_by_grant_id(): - client = boto3.client("kms", region_name="us-east-1") key_id = create_key(client) diff --git a/tests/test_lakeformation/test_lakeformation.py b/tests/test_lakeformation/test_lakeformation.py index 57b934e8f..c20c05d54 100644 --- a/tests/test_lakeformation/test_lakeformation.py +++ b/tests/test_lakeformation/test_lakeformation.py @@ -1,4 +1,5 @@ """Unit tests for lakeformation-supported APIs.""" + from typing import Dict, Optional import boto3 diff --git a/tests/test_lakeformation/test_resource_tags_integration.py b/tests/test_lakeformation/test_resource_tags_integration.py index 8a0342094..e0fb90184 100644 --- a/tests/test_lakeformation/test_resource_tags_integration.py +++ b/tests/test_lakeformation/test_resource_tags_integration.py @@ -368,9 +368,7 @@ def test_tag_lakeformation_columns( @pytest.mark.aws_verified @lakeformation_aws_verified -def test_lf_tags( - bucket_name=None, db_name=None, table_name=None, column_name=None -): # pylint: disable=unused-argument +def test_lf_tags(bucket_name=None, db_name=None, table_name=None, column_name=None): # pylint: disable=unused-argument client = boto3.client("lakeformation", region_name="eu-west-2") sts = boto3.client("sts", "eu-west-2") account_id = sts.get_caller_identity()["Account"] diff --git a/tests/test_logs/test_export_tasks.py b/tests/test_logs/test_export_tasks.py index 8fe721355..6bd95de57 100644 --- a/tests/test_logs/test_export_tasks.py +++ b/tests/test_logs/test_export_tasks.py @@ -122,9 +122,7 @@ def bucket_name(s3, account_id): # pylint: disable=redefined-outer-name @pytest.mark.aws_verified -def test_create_export_task_happy_path( - logs, s3, log_group_name, bucket_name -): # pylint: disable=redefined-outer-name +def test_create_export_task_happy_path(logs, s3, log_group_name, bucket_name): # pylint: disable=redefined-outer-name fromTime = 1611316574 to = 1642852574 resp = logs.create_export_task( @@ -144,7 +142,8 @@ def test_create_export_task_happy_path( @pytest.mark.aws_verified def test_create_export_task_raises_ClientError_when_bucket_not_found( - logs, log_group_name # pylint: disable=redefined-outer-name + logs, + log_group_name, # pylint: disable=redefined-outer-name ): destination = "368a7022dea3dd621" fromTime = 1611316574 @@ -166,7 +165,8 @@ def test_create_export_task_raises_ClientError_when_bucket_not_found( @pytest.mark.aws_verified def test_create_export_raises_ResourceNotFoundException_log_group_not_found( - logs, bucket_name # pylint: 
disable=redefined-outer-name + logs, + bucket_name, # pylint: disable=redefined-outer-name ): with pytest.raises(logs.exceptions.ResourceNotFoundException) as exc: logs.create_export_task( @@ -181,9 +181,7 @@ def test_create_export_raises_ResourceNotFoundException_log_group_not_found( @pytest.mark.aws_verified -def test_create_export_executes_export_task( - logs, s3, log_group_name, bucket_name -): # pylint: disable=redefined-outer-name +def test_create_export_executes_export_task(logs, s3, log_group_name, bucket_name): # pylint: disable=redefined-outer-name fromTime = int(unix_time_millis(datetime.now() - timedelta(days=1))) to = int(unix_time_millis(datetime.now() + timedelta(days=1))) @@ -217,9 +215,7 @@ def test_create_export_executes_export_task( assert "aws-logs-write-test" in key_names -def test_describe_export_tasks_happy_path( - logs, s3, log_group_name -): # pylint: disable=redefined-outer-name +def test_describe_export_tasks_happy_path(logs, s3, log_group_name): # pylint: disable=redefined-outer-name destination = "mybucket" fromTime = 1611316574 to = 1642852574 @@ -240,9 +236,7 @@ def test_describe_export_tasks_happy_path( assert resp["exportTasks"][0]["status"]["message"] == "Task is active" -def test_describe_export_tasks_task_id( - logs, log_group_name, bucket_name -): # pylint: disable=redefined-outer-name +def test_describe_export_tasks_task_id(logs, log_group_name, bucket_name): # pylint: disable=redefined-outer-name fromTime = 1611316574 to = 1642852574 resp = logs.create_export_task( diff --git a/tests/test_managedblockchain/test_managedblockchain_members.py b/tests/test_managedblockchain/test_managedblockchain_members.py index fe8842cff..297d2662f 100644 --- a/tests/test_managedblockchain/test_managedblockchain_members.py +++ b/tests/test_managedblockchain/test_managedblockchain_members.py @@ -505,9 +505,9 @@ def test_create_another_member_adminpassword(): ) # Too short - badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][ - "AdminPassword" - ] = "badap" + badadminpassmemberconf["FrameworkConfiguration"]["Fabric"]["AdminPassword"] = ( + "badap" + ) with pytest.raises(ParamValidationError) as ex: conn.create_member( NetworkId=network_id, @@ -521,9 +521,9 @@ def test_create_another_member_adminpassword(): ) # No uppercase or numbers - badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][ - "AdminPassword" - ] = "badadminpwd" + badadminpassmemberconf["FrameworkConfiguration"]["Fabric"]["AdminPassword"] = ( + "badadminpwd" + ) with pytest.raises(ClientError) as ex: conn.create_member( NetworkId=network_id, @@ -535,9 +535,9 @@ def test_create_another_member_adminpassword(): assert "Invalid request body" in err["Message"] # No lowercase or numbers - badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][ - "AdminPassword" - ] = "BADADMINPWD" + badadminpassmemberconf["FrameworkConfiguration"]["Fabric"]["AdminPassword"] = ( + "BADADMINPWD" + ) with pytest.raises(ClientError) as ex: conn.create_member( NetworkId=network_id, @@ -549,9 +549,9 @@ def test_create_another_member_adminpassword(): assert "Invalid request body" in err["Message"] # No numbers - badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][ - "AdminPassword" - ] = "badAdminpwd" + badadminpassmemberconf["FrameworkConfiguration"]["Fabric"]["AdminPassword"] = ( + "badAdminpwd" + ) with pytest.raises(ClientError) as ex: conn.create_member( NetworkId=network_id, @@ -563,9 +563,9 @@ def test_create_another_member_adminpassword(): assert "Invalid request body" in err["Message"] # Invalid 
character - badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][ - "AdminPassword" - ] = "badAdmin@pwd1" + badadminpassmemberconf["FrameworkConfiguration"]["Fabric"]["AdminPassword"] = ( + "badAdmin@pwd1" + ) with pytest.raises(ClientError) as ex: conn.create_member( NetworkId=network_id, diff --git a/tests/test_personalize/test_personalize_schema.py b/tests/test_personalize/test_personalize_schema.py index 9888ddf92..c121f82e4 100644 --- a/tests/test_personalize/test_personalize_schema.py +++ b/tests/test_personalize/test_personalize_schema.py @@ -1,4 +1,5 @@ """Unit tests for personalize-supported APIs.""" + import json import re diff --git a/tests/test_pinpoint/test_pinpoint.py b/tests/test_pinpoint/test_pinpoint.py index a532fccb2..bdf4cc042 100644 --- a/tests/test_pinpoint/test_pinpoint.py +++ b/tests/test_pinpoint/test_pinpoint.py @@ -1,4 +1,5 @@ """Unit tests for pinpoint-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_pinpoint/test_pinpoint_application_tags.py b/tests/test_pinpoint/test_pinpoint_application_tags.py index fa11b30d2..a9058c5c7 100644 --- a/tests/test_pinpoint/test_pinpoint_application_tags.py +++ b/tests/test_pinpoint/test_pinpoint_application_tags.py @@ -1,4 +1,5 @@ """Unit tests for pinpoint-supported APIs.""" + import boto3 from moto import mock_aws diff --git a/tests/test_pinpoint/test_pinpoint_event_stream.py b/tests/test_pinpoint/test_pinpoint_event_stream.py index 57f85f580..2744d445f 100644 --- a/tests/test_pinpoint/test_pinpoint_event_stream.py +++ b/tests/test_pinpoint/test_pinpoint_event_stream.py @@ -1,4 +1,5 @@ """Unit tests for pinpoint-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_polly/test_server.py b/tests/test_polly/test_server.py index cb5f6cba8..0c5977496 100644 --- a/tests/test_polly/test_server.py +++ b/tests/test_polly/test_server.py @@ -1,4 +1,5 @@ """Test the different server responses.""" + import moto.server as server from moto import mock_aws diff --git a/tests/test_quicksight/test_quicksight_groups.py b/tests/test_quicksight/test_quicksight_groups.py index 718f63465..802515172 100644 --- a/tests/test_quicksight/test_quicksight_groups.py +++ b/tests/test_quicksight/test_quicksight_groups.py @@ -1,4 +1,5 @@ """Unit tests for quicksight-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_quicksight/test_quicksight_users.py b/tests/test_quicksight/test_quicksight_users.py index b1a80e038..4eb6da7ff 100644 --- a/tests/test_quicksight/test_quicksight_users.py +++ b/tests/test_quicksight/test_quicksight_users.py @@ -1,4 +1,5 @@ """Unit tests for quicksight-supported APIs.""" + import boto3 import pytest from botocore.exceptions import ClientError diff --git a/tests/test_redshift/test_server.py b/tests/test_redshift/test_server.py index 95c9c4dab..a55baa4bd 100644 --- a/tests/test_redshift/test_server.py +++ b/tests/test_redshift/test_server.py @@ -1,4 +1,5 @@ """Test the different server responses.""" + import json import re import unittest diff --git a/tests/test_redshiftdata/test_server.py b/tests/test_redshiftdata/test_server.py index 3cce643e2..88d184aec 100644 --- a/tests/test_redshiftdata/test_server.py +++ b/tests/test_redshiftdata/test_server.py @@ -1,4 +1,5 @@ """Test different server responses.""" + import json import unittest from uuid import UUID diff --git a/tests/test_rekognition/test_rekognition.py 
b/tests/test_rekognition/test_rekognition.py index bf3d20a00..94239fcd5 100644 --- a/tests/test_rekognition/test_rekognition.py +++ b/tests/test_rekognition/test_rekognition.py @@ -1,4 +1,5 @@ """Unit tests for rekognition-supported APIs.""" + import random import string diff --git a/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py b/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py index 7ffbe5459..905bab5bc 100644 --- a/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py +++ b/tests/test_resourcegroupstaggingapi/test_resourcegroupstaggingapi.py @@ -10,7 +10,6 @@ from tests.test_ds.test_ds_simple_ad_directory import create_test_directory @mock_aws def test_get_resources_cloudformation(): - template = { "AWSTemplateFormatVersion": "2010-09-09", "Resources": {"test": {"Type": "AWS::S3::Bucket"}}, @@ -135,7 +134,6 @@ def test_get_resources_backup(): @mock_aws def test_get_resources_ecs(): - # ecs:cluster client = boto3.client("ecs", region_name="us-east-1") cluster_one = ( @@ -882,6 +880,7 @@ def test_get_resources_sns(): "Tags" ] + @mock_aws def test_get_resources_ssm(): import json @@ -899,16 +898,16 @@ def test_get_resources_ssm(): Name="TestDocument", DocumentType="Command", DocumentFormat="JSON", - Tags=[{"Key": 'testing', "Value": "testingValue"}], + Tags=[{"Key": "testing", "Value": "testingValue"}], ) rtapi = boto3.client("resourcegroupstaggingapi", region_name="us-east-1") resp = rtapi.get_resources(ResourceTypeFilters=["ssm"]) assert len(resp["ResourceTagMappingList"]) == 1 - assert {"Key": 'testing', "Value": "testingValue"} in resp["ResourceTagMappingList"][0][ - "Tags" - ] + assert {"Key": "testing", "Value": "testingValue"} in resp[ + "ResourceTagMappingList" + ][0]["Tags"] @mock_aws diff --git a/tests/test_resourcegroupstaggingapi/test_server.py b/tests/test_resourcegroupstaggingapi/test_server.py index 9ee1cc2c4..0cbca2dca 100644 --- a/tests/test_resourcegroupstaggingapi/test_server.py +++ b/tests/test_resourcegroupstaggingapi/test_server.py @@ -1,4 +1,5 @@ """Test the different server responses.""" + import moto.server as server diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py index 2c692eb4a..821cfa4fe 100644 --- a/tests/test_route53/test_route53.py +++ b/tests/test_route53/test_route53.py @@ -905,7 +905,7 @@ def test_change_resource_record_set__delete_should_match_create(): "Action": "DELETE", "ResourceRecordSet": { "Name": name, - "Type": "A" + "Type": "A", # Missing TTL and ResourceRecords }, } diff --git a/tests/test_route53domains/test_route53domains_domain.py b/tests/test_route53domains/test_route53domains_domain.py index 891329b32..3b6998f1a 100644 --- a/tests/test_route53domains/test_route53domains_domain.py +++ b/tests/test_route53domains/test_route53domains_domain.py @@ -77,9 +77,9 @@ def test_register_domain(domain_parameters: Dict): if operation["OperationId"] == operation_id: return - assert operation_id in [ - operation["OperationId"] for operation in operations - ], "Could not find expected operation id returned from `register_domain` in operation list" + assert ( + operation_id in [operation["OperationId"] for operation in operations] + ), "Could not find expected operation id returned from `register_domain` in operation list" @mock_aws diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py index e7cedeceb..958718627 100644 --- a/tests/test_s3/test_server.py +++ b/tests/test_s3/test_server.py @@ -192,7 +192,10 @@ def 
test_s3_server_post_unicode_bucket_key(): """Verify non-ascii characters in request URLs (e.g., S3 object names).""" dispatcher = server.DomainDispatcherApplication(server.create_backend_app) backend_app = dispatcher.get_application( - {"HTTP_HOST": "s3.amazonaws.com", "PATH_INFO": "/test-bucket/test-object-てすと"} + { + "HTTP_HOST": "s3.amazonaws.com", + "PATH_INFO": "/test-bucket/test-object-てすと", + } ) assert backend_app backend_app = dispatcher.get_application( diff --git a/tests/test_sagemaker/test_sagemaker_experiment.py b/tests/test_sagemaker/test_sagemaker_experiment.py index d22fb0984..97073c8df 100644 --- a/tests/test_sagemaker/test_sagemaker_experiment.py +++ b/tests/test_sagemaker/test_sagemaker_experiment.py @@ -30,7 +30,6 @@ def test_create_experiment(sagemaker_client): def test_list_experiments(sagemaker_client): - experiment_names = [f"some-experiment-name-{i}" for i in range(10)] for experiment_name in experiment_names: diff --git a/tests/test_sagemaker/test_sagemaker_feature_groups.py b/tests/test_sagemaker/test_sagemaker_feature_groups.py index d6a82901d..142054137 100644 --- a/tests/test_sagemaker/test_sagemaker_feature_groups.py +++ b/tests/test_sagemaker/test_sagemaker_feature_groups.py @@ -1,4 +1,5 @@ """Unit tests for sagemaker-supported APIs.""" + import re from datetime import datetime diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py index 6da6e034c..bc1541232 100644 --- a/tests/test_secretsmanager/test_secretsmanager.py +++ b/tests/test_secretsmanager/test_secretsmanager.py @@ -687,8 +687,9 @@ def test_describe_secret_with_KmsKeyId(): secret_description = conn.describe_secret(SecretId=results["ARN"]) assert secret_description["KmsKeyId"] == "dummy_arn" - assert conn.list_secrets()["SecretList"][0]["KmsKeyId"] == ( - secret_description["KmsKeyId"] + assert ( + conn.list_secrets()["SecretList"][0]["KmsKeyId"] + == (secret_description["KmsKeyId"]) ) diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py index 0d318ce2c..d6b5ae8ec 100644 --- a/tests/test_secretsmanager/test_server.py +++ b/tests/test_secretsmanager/test_server.py @@ -20,7 +20,6 @@ def skip_in_server_mode(): @mock_aws def test_get_secret_value(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -42,7 +41,6 @@ def test_get_secret_value(): @mock_aws def test_get_secret_that_does_not_exist(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -102,7 +100,6 @@ def test_get_secret_that_has_no_value(): @mock_aws def test_create_secret(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -128,7 +125,6 @@ def test_create_secret(): @mock_aws def test_describe_secret(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -167,7 +163,6 @@ def test_describe_secret(): @mock_aws def test_describe_secret_that_does_not_exist(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -184,7 +179,6 @@ def test_describe_secret_that_does_not_exist(): @mock_aws def test_describe_secret_that_does_not_match(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() @@ -723,7 +717,6 @@ def test_can_list_secret_version_ids(): @mock_aws def test_get_resource_policy_secret(): - backend = server.create_backend_app("secretsmanager") test_client = backend.test_client() diff --git 
a/tests/test_ses/test_ses_boto3.py b/tests/test_ses/test_ses_boto3.py index dfcd8ec6f..ffe0ab706 100644 --- a/tests/test_ses/test_ses_boto3.py +++ b/tests/test_ses/test_ses_boto3.py @@ -1127,11 +1127,13 @@ def test_update_receipt_rule_actions(): assert "S3Action" in updated_rule_description["Rule"]["Actions"][0] assert ( - updated_rule_description["Rule"]["Actions"][0]["S3Action"]["TopicArn"] - ) == "newString" + (updated_rule_description["Rule"]["Actions"][0]["S3Action"]["TopicArn"]) + == "newString" + ) assert ( - updated_rule_description["Rule"]["Actions"][0]["S3Action"]["BucketName"] - ) == "updatedTestBucketName" + (updated_rule_description["Rule"]["Actions"][0]["S3Action"]["BucketName"]) + == "updatedTestBucketName" + ) assert updated_rule_description["Rule"]["Actions"][0]["S3Action"][ "ObjectKeyPrefix" ] == ("updatedTestObjectKeyPrefix") @@ -1147,11 +1149,17 @@ def test_update_receipt_rule_actions(): == "newString" ) assert ( - updated_rule_description["Rule"]["Actions"][0]["BounceAction"]["SmtpReplyCode"] - ) == "newString" + ( + updated_rule_description["Rule"]["Actions"][0]["BounceAction"][ + "SmtpReplyCode" + ] + ) + == "newString" + ) assert ( - updated_rule_description["Rule"]["Actions"][0]["BounceAction"]["StatusCode"] - ) == "newString" + (updated_rule_description["Rule"]["Actions"][0]["BounceAction"]["StatusCode"]) + == "newString" + ) assert ( updated_rule_description["Rule"]["Actions"][0]["BounceAction"]["Message"] == "newString" @@ -1357,9 +1365,9 @@ def test_update_ses_template(): template["SubjectPart"] = "Hi, {{name}}!" template["TextPart"] = "Dear {{name}},\r\n Your favorite color is {{color}}" - template[ - "HtmlPart" - ] = "
<h1>Hello {{name}},</h1><p>Your favorite color is {{color}}</p>" + template["HtmlPart"] = ( + "<h1>Hello {{name}},</h1><p>Your favorite color is {{color}}</p>
" + ) conn.update_template(Template=template) result = conn.get_template(TemplateName=template["TemplateName"]) @@ -1529,8 +1537,9 @@ def test_get_identity_mail_from_domain_attributes(): assert len(attributes["MailFromDomainAttributes"]) == 1 assert len(attributes["MailFromDomainAttributes"]["bar@foo.com"]) == 1 assert ( - attributes["MailFromDomainAttributes"]["bar@foo.com"]["BehaviorOnMXFailure"] - ) == "UseDefaultValue" + (attributes["MailFromDomainAttributes"]["bar@foo.com"]["BehaviorOnMXFailure"]) + == "UseDefaultValue" + ) # Must return multiple configured identities conn.verify_domain_identity(Domain="lorem.com") diff --git a/tests/test_special_cases/test_custom_amis.py b/tests/test_special_cases/test_custom_amis.py index 2980e5ec7..82f19c175 100644 --- a/tests/test_special_cases/test_custom_amis.py +++ b/tests/test_special_cases/test_custom_amis.py @@ -2,6 +2,7 @@ This test lives on its own as it requires moto to be imported after setting of MOTO_AMIS_PATH env var, as per ec2 models documentation """ + import importlib import json import os diff --git a/tests/test_sqs/test_server.py b/tests/test_sqs/test_server.py index 92730db45..8d4435d78 100644 --- a/tests/test_sqs/test_server.py +++ b/tests/test_sqs/test_server.py @@ -18,7 +18,6 @@ def test_sqs_list_identities(): # See: https://github.com/getmoto/moto/issues/866 for queue_name in ("testqueue", "otherqueue.fifo"): - res = test_client.put(f"/?Action=CreateQueue&QueueName={queue_name}") res = test_client.put( diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py index 05d531f34..7b6993fe5 100644 --- a/tests/test_sqs/test_sqs.py +++ b/tests/test_sqs/test_sqs.py @@ -1023,7 +1023,6 @@ def test_change_message_visibility_than_permitted(): ) with freeze_time("2015-01-01 12:05:00"): - with pytest.raises(ClientError) as err: conn.change_message_visibility( QueueUrl=queue.url, @@ -2923,7 +2922,6 @@ def test_send_message_fails_when_message_size_greater_than_max_message_size(): def test_fifo_queue_deduplication_with_id( msg_1, msg_2, dedupid_1, dedupid_2, expected_count ): - sqs = boto3.resource("sqs", region_name=REGION) q_name = str(uuid4())[0:6] msg_queue = sqs.create_queue( @@ -2946,7 +2944,6 @@ def test_fifo_queue_deduplication_with_id( "msg_1, msg_2, expected_count", [("msg1", "msg1", 1), ("msg1", "msg2", 2)] ) def test_fifo_queue_deduplication_withoutid(msg_1, msg_2, expected_count): - sqs = boto3.resource("sqs", region_name=REGION) q_name = str(uuid4())[0:6] msg_queue = sqs.create_queue( @@ -2983,7 +2980,6 @@ def test_fifo_queue_send_duplicate_messages_after_deduplication_time_limit(): @mock_aws def test_fifo_queue_send_deduplicationid_same_as_sha256_of_old_message(): - sqs = boto3.resource("sqs", region_name=REGION) q_name = str(uuid4())[0:6] msg_queue = sqs.create_queue( @@ -3006,7 +3002,6 @@ def test_fifo_queue_send_deduplicationid_same_as_sha256_of_old_message(): @mock_aws def test_fifo_send_message_when_same_group_id_is_in_dlq(): - sqs = boto3.resource("sqs", region_name=REGION) q_name = f"{str(uuid4())[0:6]}-dlq.fifo" dlq = sqs.create_queue( diff --git a/tests/test_sqs/test_sqs_multiaccount.py b/tests/test_sqs/test_sqs_multiaccount.py index 4dbe81589..bf9689ff4 100644 --- a/tests/test_sqs/test_sqs_multiaccount.py +++ b/tests/test_sqs/test_sqs_multiaccount.py @@ -9,7 +9,6 @@ from moto import mock_aws class TestStsAssumeRole(unittest.TestCase): @mock_aws def test_list_queues_in_different_account(self): - sqs = boto3.client("sqs", region_name="us-east-1") queue_url = 
sqs.create_queue(QueueName=str(uuid4()))["QueueUrl"] diff --git a/tests/test_ssm/test_ssm_docs.py b/tests/test_ssm/test_ssm_docs.py index 7b7fb9224..28ce79852 100644 --- a/tests/test_ssm/test_ssm_docs.py +++ b/tests/test_ssm/test_ssm_docs.py @@ -28,7 +28,6 @@ def _validate_document_description( expected_default_version, expected_format, ): - if expected_format == "JSON": assert doc_description["Hash"] == ( hashlib.sha256(json.dumps(json_doc).encode("utf-8")).hexdigest() diff --git a/tests/test_stepfunctions/parser/__init__.py b/tests/test_stepfunctions/parser/__init__.py index c6f57f99c..eb8c59407 100644 --- a/tests/test_stepfunctions/parser/__init__.py +++ b/tests/test_stepfunctions/parser/__init__.py @@ -79,7 +79,6 @@ def verify_execution_result( if execution["status"] == expected_status: result = _verify_result(client, execution, execution_arn) if result is not False: - client.delete_state_machine(stateMachineArn=state_machine_arn) iam.delete_role_policy( RoleName=role_name, PolicyName="allowLambdaInvoke" diff --git a/tests/test_stepfunctions/parser/test_stepfunctions_sqs_integration.py b/tests/test_stepfunctions/parser/test_stepfunctions_sqs_integration.py index 393425e32..c472aed30 100644 --- a/tests/test_stepfunctions/parser/test_stepfunctions_sqs_integration.py +++ b/tests/test_stepfunctions/parser/test_stepfunctions_sqs_integration.py @@ -25,7 +25,6 @@ def test_state_machine_calling_sqs_with_heartbeat(): tmpl_name = "services/sqs_heartbeat" def _verify_result(client, execution, execution_arn): - resp = sqs.receive_message(QueueUrl=queue_url) if "Messages" in resp: task_token = json.loads(resp["Messages"][0]["Body"])["TaskToken"] @@ -57,7 +56,6 @@ def test_state_machine_calling_sqs_with_task_success(): tmpl_name = "services/sqs_heartbeat" def _verify_result(client, execution, execution_arn): - resp = sqs.receive_message(QueueUrl=queue_url) if "Messages" in resp: task_token = json.loads(resp["Messages"][0]["Body"])["TaskToken"] diff --git a/tests/test_stepfunctions/test_stepfunctions.py b/tests/test_stepfunctions/test_stepfunctions.py index 203e6b1f4..afeb80cf4 100644 --- a/tests/test_stepfunctions/test_stepfunctions.py +++ b/tests/test_stepfunctions/test_stepfunctions.py @@ -75,12 +75,12 @@ def test_state_machine_creation_fails_with_invalid_names(): "uni\u0007code", "uni\u0008code", "uni\u0009code", - "uni\u000Acode", - "uni\u000Bcode", - "uni\u000Ccode", - "uni\u000Dcode", - "uni\u000Ecode", - "uni\u000Fcode", + "uni\u000acode", + "uni\u000bcode", + "uni\u000ccode", + "uni\u000dcode", + "uni\u000ecode", + "uni\u000fcode", "uni\u0010code", "uni\u0011code", "uni\u0012code", @@ -91,13 +91,13 @@ def test_state_machine_creation_fails_with_invalid_names(): "uni\u0017code", "uni\u0018code", "uni\u0019code", - "uni\u001Acode", - "uni\u001Bcode", - "uni\u001Ccode", - "uni\u001Dcode", - "uni\u001Ecode", - "uni\u001Fcode", - "uni\u007Fcode", + "uni\u001acode", + "uni\u001bcode", + "uni\u001ccode", + "uni\u001dcode", + "uni\u001ecode", + "uni\u001fcode", + "uni\u007fcode", "uni\u0080code", "uni\u0081code", "uni\u0082code", @@ -108,12 +108,12 @@ def test_state_machine_creation_fails_with_invalid_names(): "uni\u0087code", "uni\u0088code", "uni\u0089code", - "uni\u008Acode", - "uni\u008Bcode", - "uni\u008Ccode", - "uni\u008Dcode", - "uni\u008Ecode", - "uni\u008Fcode", + "uni\u008acode", + "uni\u008bcode", + "uni\u008ccode", + "uni\u008dcode", + "uni\u008ecode", + "uni\u008fcode", "uni\u0090code", "uni\u0091code", "uni\u0092code", @@ -124,12 +124,12 @@ def 
test_state_machine_creation_fails_with_invalid_names(): "uni\u0097code", "uni\u0098code", "uni\u0099code", - "uni\u009Acode", - "uni\u009Bcode", - "uni\u009Ccode", - "uni\u009Dcode", - "uni\u009Ecode", - "uni\u009Fcode", + "uni\u009acode", + "uni\u009bcode", + "uni\u009ccode", + "uni\u009dcode", + "uni\u009ecode", + "uni\u009fcode", ] # diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index 083932c13..4e5f3f570 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -199,9 +199,7 @@ def test_assume_role_with_saml(): -""".replace( - "\n", "" - ) +""".replace("\n", "") assume_role_response = client.assume_role_with_saml( RoleArn=role_input, @@ -297,9 +295,7 @@ def test_assume_role_with_saml_should_not_rely_on_attribute_order(): -""".replace( - "\n", "" - ) +""".replace("\n", "") assume_role_response = client.assume_role_with_saml( RoleArn=role_input, @@ -383,9 +379,7 @@ def test_assume_role_with_saml_should_respect_xml_namespaces(): -""".replace( - "\n", "" - ) +""".replace("\n", "") assume_role_response = client.assume_role_with_saml( RoleArn=role_input, @@ -480,9 +474,7 @@ def test_assume_role_with_saml_when_xml_tag_contains_xmlns_attributes(): -""".replace( - "\n", "" - ) +""".replace("\n", "") assume_role_response = client.assume_role_with_saml( RoleArn=role_input, @@ -568,9 +560,7 @@ def test_assume_role_with_saml_when_saml_attribute_not_provided(): -""".replace( - "\n", "" - ) +""".replace("\n", "") assume_role_response = client.assume_role_with_saml( RoleArn=role_input, diff --git a/tests/test_sts/test_sts_integration.py b/tests/test_sts/test_sts_integration.py index 8fe1d7084..fa870cc70 100644 --- a/tests/test_sts/test_sts_integration.py +++ b/tests/test_sts/test_sts_integration.py @@ -111,9 +111,7 @@ class TestStsAssumeRole(unittest.TestCase): - """.replace( - "\n", "" - ) + """.replace("\n", "") assume_role_response = self.sts.assume_role_with_saml( RoleArn=role_input, diff --git a/tests/test_swf/models/test_workflow_execution.py b/tests/test_swf/models/test_workflow_execution.py index 3f01c9445..ed705e1ea 100644 --- a/tests/test_swf/models/test_workflow_execution.py +++ b/tests/test_swf/models/test_workflow_execution.py @@ -596,7 +596,6 @@ def test_start_timer(): wfe = make_workflow_execution() START_TIMER_EVENT_ATTRIBUTES = {"startToFireTimeout": "10", "timerId": "abc123"} with patch("moto.swf.models.workflow_execution.ThreadingTimer"): - wfe.start_timer(123, START_TIMER_EVENT_ATTRIBUTES) last_event = wfe.events()[-1] diff --git a/tests/test_swf/utils.py b/tests/test_swf/utils.py index 1e24ef85f..fd91775dd 100644 --- a/tests/test_swf/utils.py +++ b/tests/test_swf/utils.py @@ -24,9 +24,9 @@ SCHEDULE_ACTIVITY_TASK_DECISION = { }, } for key, value in ACTIVITY_TASK_TIMEOUTS.items(): - SCHEDULE_ACTIVITY_TASK_DECISION["scheduleActivityTaskDecisionAttributes"][ - key - ] = value + SCHEDULE_ACTIVITY_TASK_DECISION["scheduleActivityTaskDecisionAttributes"][key] = ( + value + ) # A test Domain diff --git a/tests/test_transcribe/test_transcribe_boto3.py b/tests/test_transcribe/test_transcribe_boto3.py index 0a72d8293..6b3470726 100644 --- a/tests/test_transcribe/test_transcribe_boto3.py +++ b/tests/test_transcribe/test_transcribe_boto3.py @@ -6,7 +6,6 @@ from moto import mock_aws @mock_aws def test_run_medical_transcription_job_minimal_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -26,8 +25,9 @@ def test_run_medical_transcription_job_minimal_params(): resp = 
client.get_medical_transcription_job(MedicalTranscriptionJobName=job_name) assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 transcription_job = resp["MedicalTranscriptionJob"] - assert transcription_job["MedicalTranscriptionJobName"] == ( - args["MedicalTranscriptionJobName"] + assert ( + transcription_job["MedicalTranscriptionJobName"] + == (args["MedicalTranscriptionJobName"]) ) assert transcription_job["TranscriptionJobStatus"] == "QUEUED" assert transcription_job["LanguageCode"] == args["LanguageCode"] @@ -74,7 +74,6 @@ def test_run_medical_transcription_job_minimal_params(): @mock_aws def test_run_medical_transcription_job_all_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -116,8 +115,9 @@ def test_run_medical_transcription_job_all_params(): resp = client.get_medical_transcription_job(MedicalTranscriptionJobName=job_name) assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 transcription_job = resp["MedicalTranscriptionJob"] - assert transcription_job["MedicalTranscriptionJobName"] == ( - args["MedicalTranscriptionJobName"] + assert ( + transcription_job["MedicalTranscriptionJobName"] + == (args["MedicalTranscriptionJobName"]) ) assert transcription_job["TranscriptionJobStatus"] == "QUEUED" assert transcription_job["LanguageCode"] == args["LanguageCode"] @@ -126,23 +126,29 @@ def test_run_medical_transcription_job_all_params(): assert "StartTime" not in transcription_job assert "CompletionTime" not in transcription_job assert "Transcript" not in transcription_job - assert transcription_job["Settings"]["ShowSpeakerLabels"] == ( - args["Settings"]["ShowSpeakerLabels"] + assert ( + transcription_job["Settings"]["ShowSpeakerLabels"] + == (args["Settings"]["ShowSpeakerLabels"]) ) - assert transcription_job["Settings"]["MaxSpeakerLabels"] == ( - args["Settings"]["MaxSpeakerLabels"] + assert ( + transcription_job["Settings"]["MaxSpeakerLabels"] + == (args["Settings"]["MaxSpeakerLabels"]) ) - assert transcription_job["Settings"]["ChannelIdentification"] == ( - args["Settings"]["ChannelIdentification"] + assert ( + transcription_job["Settings"]["ChannelIdentification"] + == (args["Settings"]["ChannelIdentification"]) ) - assert transcription_job["Settings"]["ShowAlternatives"] == ( - args["Settings"]["ShowAlternatives"] + assert ( + transcription_job["Settings"]["ShowAlternatives"] + == (args["Settings"]["ShowAlternatives"]) ) - assert transcription_job["Settings"]["MaxAlternatives"] == ( - args["Settings"]["MaxAlternatives"] + assert ( + transcription_job["Settings"]["MaxAlternatives"] + == (args["Settings"]["MaxAlternatives"]) ) - assert transcription_job["Settings"]["VocabularyName"] == ( - args["Settings"]["VocabularyName"] + assert ( + transcription_job["Settings"]["VocabularyName"] + == (args["Settings"]["VocabularyName"]) ) assert transcription_job["Specialty"] == args["Specialty"] @@ -176,7 +182,6 @@ def test_run_medical_transcription_job_all_params(): @mock_aws def test_run_transcription_job_all_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -230,23 +235,29 @@ def test_run_transcription_job_all_params(): assert "StartTime" not in transcription_job assert "CompletionTime" not in transcription_job assert "Transcript" not in transcription_job - assert transcription_job["Settings"]["ShowSpeakerLabels"] == ( - args["Settings"]["ShowSpeakerLabels"] + assert ( + transcription_job["Settings"]["ShowSpeakerLabels"] + == (args["Settings"]["ShowSpeakerLabels"]) ) - assert 
transcription_job["Settings"]["MaxSpeakerLabels"] == ( - args["Settings"]["MaxSpeakerLabels"] + assert ( + transcription_job["Settings"]["MaxSpeakerLabels"] + == (args["Settings"]["MaxSpeakerLabels"]) ) - assert transcription_job["Settings"]["ChannelIdentification"] == ( - args["Settings"]["ChannelIdentification"] + assert ( + transcription_job["Settings"]["ChannelIdentification"] + == (args["Settings"]["ChannelIdentification"]) ) - assert transcription_job["Settings"]["ShowAlternatives"] == ( - args["Settings"]["ShowAlternatives"] + assert ( + transcription_job["Settings"]["ShowAlternatives"] + == (args["Settings"]["ShowAlternatives"]) ) - assert transcription_job["Settings"]["MaxAlternatives"] == ( - args["Settings"]["MaxAlternatives"] + assert ( + transcription_job["Settings"]["MaxAlternatives"] + == (args["Settings"]["MaxAlternatives"]) ) - assert transcription_job["Settings"]["VocabularyName"] == ( - args["Settings"]["VocabularyName"] + assert ( + transcription_job["Settings"]["VocabularyName"] + == (args["Settings"]["VocabularyName"]) ) # IN_PROGRESS resp = client.get_transcription_job(TranscriptionJobName=job_name) @@ -288,7 +299,6 @@ def test_run_transcription_job_all_params(): @mock_aws def test_run_transcription_job_minimal_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -358,7 +368,6 @@ def test_run_transcription_job_minimal_params(): @mock_aws def test_run_transcription_job_s3output_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -438,7 +447,6 @@ def test_run_transcription_job_s3output_params(): @mock_aws def test_run_transcription_job_identify_languages_params(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -528,7 +536,6 @@ def test_get_nonexistent_transcription_job(): @mock_aws def test_run_medical_transcription_job_with_existing_job_name(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -550,7 +557,6 @@ def test_run_medical_transcription_job_with_existing_job_name(): @mock_aws def test_run_transcription_job_with_existing_job_name(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -569,7 +575,6 @@ def test_run_transcription_job_with_existing_job_name(): @mock_aws def test_run_medical_transcription_job_nonexistent_vocabulary(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -589,7 +594,6 @@ def test_run_medical_transcription_job_nonexistent_vocabulary(): @mock_aws def test_run_transcription_job_nonexistent_vocabulary(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -607,7 +611,6 @@ def test_run_transcription_job_nonexistent_vocabulary(): @mock_aws def test_list_medical_transcription_jobs(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -699,7 +702,6 @@ def test_list_medical_transcription_jobs(): @mock_aws def test_list_transcription_jobs(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -783,7 +785,6 @@ def test_list_transcription_jobs(): @mock_aws def test_create_medical_vocabulary(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -818,7 +819,6 @@ def test_create_medical_vocabulary(): @mock_aws def test_create_vocabulary(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ 
-873,7 +873,6 @@ def test_create_vocabulary(): @mock_aws def test_list_vocabularies(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -956,7 +955,6 @@ def test_list_vocabularies(): @mock_aws def test_list_medical_vocabularies(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -1060,7 +1058,6 @@ def test_get_nonexistent_vocabulary(): @mock_aws def test_create_medical_vocabulary_with_existing_vocabulary_name(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -1079,7 +1076,6 @@ def test_create_medical_vocabulary_with_existing_vocabulary_name(): @mock_aws def test_create_vocabulary_with_existing_vocabulary_name(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) @@ -1098,7 +1094,6 @@ def test_create_vocabulary_with_existing_vocabulary_name(): @mock_aws def test_create_vocabulary_with_bad_request(): - region_name = "us-east-1" client = boto3.client("transcribe", region_name=region_name) diff --git a/tests/test_wafv2/test_wafv2.py b/tests/test_wafv2/test_wafv2.py index 019eb430f..56aacd9f3 100644 --- a/tests/test_wafv2/test_wafv2.py +++ b/tests/test_wafv2/test_wafv2.py @@ -10,7 +10,6 @@ from .test_helper_functions import CREATE_WEB_ACL_BODY, LIST_WEB_ACL_BODY @mock_aws def test_create_web_acl(): - conn = boto3.client("wafv2", region_name="us-east-1") res = conn.create_web_acl(**CREATE_WEB_ACL_BODY("John", "REGIONAL")) web_acl = res["Summary"]