Techdebt: MyPy: Remove unused ignores (#6542)

Bert Blommers 2023-07-20 15:46:54 +00:00 committed by GitHub
parent 774ff7cf6e
commit e26cfe7dc6
67 changed files with 224 additions and 180 deletions
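
The ignores removed below were presumably surfaced by mypy's unused-ignore check; a minimal sketch of the configuration that reports "type: ignore" comments mypy no longer needs (the project's actual mypy settings are not shown in this diff):

    # mypy.ini / setup.cfg -- warn about ignores that no longer suppress any error
    [mypy]
    warn_unused_ignores = True

    # or on the command line (hypothetical invocation):
    #   mypy --warn-unused-ignores moto/

With that option enabled, each leftover comment is reported as an unused "type: ignore" comment, which is what this commit cleans up file by file.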

View File

@@ -116,7 +116,7 @@ class PrometheusServiceBackend(BaseBackend):
         self.workspaces.pop(workspace_id, None)
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore
-    def list_workspaces(self, alias: str) -> List[Workspace]:  # type: ignore[misc]
+    def list_workspaces(self, alias: str) -> List[Workspace]:
         if alias:
             return [w for w in self.workspaces.values() if w.alias == alias]
         return list(self.workspaces.values())
@@ -173,7 +173,9 @@ class PrometheusServiceBackend(BaseBackend):
         return ns
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore
-    def list_rule_groups_namespaces(self, name: str, workspace_id: str) -> List[RuleGroupNamespace]:  # type: ignore
+    def list_rule_groups_namespaces(
+        self, name: str, workspace_id: str
+    ) -> List[RuleGroupNamespace]:
         ws = self.describe_workspace(workspace_id)
         if name:
             return [

View File

@@ -674,7 +674,7 @@ class Stage(BaseModel):
             self.tracing_enabled = self._str2bool(op["value"])
         elif op["path"].startswith("/accessLogSettings/"):
             self.access_log_settings = self.access_log_settings or {}
-            self.access_log_settings[op["path"].split("/")[-1]] = op["value"]  # type: ignore[index]
+            self.access_log_settings[op["path"].split("/")[-1]] = op["value"]
         else:
             # (e.g., path could be '/*/*/logging/loglevel')
             split_path = op["path"].split("/", 3)
@@ -2309,11 +2309,11 @@ class APIGatewayBackend(BaseBackend):
             self.base_path_mappings[domain_name] = {}
         else:
             if (
-                self.base_path_mappings[domain_name].get(new_base_path)  # type: ignore[arg-type]
+                self.base_path_mappings[domain_name].get(new_base_path)
                 and new_base_path != "(none)"
             ):
                 raise BasePathConflictException()
-        self.base_path_mappings[domain_name][new_base_path] = new_base_path_mapping  # type: ignore[index]
+        self.base_path_mappings[domain_name][new_base_path] = new_base_path_mapping
         return new_base_path_mapping
     def get_base_path_mappings(self, domain_name: str) -> List[BasePathMapping]:

View File

@@ -429,7 +429,9 @@ class APIGatewayResponse(BaseResponse):
         self.backend.delete_stage(function_id, stage_name)
         return 202, {}, "{}"
-    def export(self, request: Any, full_url: str, headers: Dict[str, str]) -> TYPE_RESPONSE:  # type: ignore[return]
+    def export(
+        self, request: Any, full_url: str, headers: Dict[str, str]
+    ) -> TYPE_RESPONSE:
         self.setup_class(request, full_url, headers)
         url_path_parts = self.path.split("/")
         rest_api_id = url_path_parts[-5]

View File

@@ -236,7 +236,7 @@ class AppSyncResponse(BaseResponse):
         )
         return 200, {}, json.dumps(dict(type=graphql_type))
-    def get_introspection_schema(self) -> TYPE_RESPONSE:  # type: ignore[return]
+    def get_introspection_schema(self) -> TYPE_RESPONSE:
         api_id = self.path.split("/")[-2]
         format_ = self.querystring.get("format")[0]  # type: ignore[index]
         if self.querystring.get("includeDirectives"):

View File

@@ -339,7 +339,7 @@ class AthenaBackend(BaseBackend):
         return data_catalog
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore
-    def list_named_queries(self, work_group: str) -> List[str]:  # type: ignore[misc]
+    def list_named_queries(self, work_group: str) -> List[str]:
         named_query_ids = [
             q.id for q in self.named_queries.values() if q.workgroup.name == work_group
         ]

View File

@ -276,9 +276,7 @@ class LayerVersion(CloudFormationModel):
self.code_size, self.code_size,
self.code_sha_256, self.code_sha_256,
self.code_digest, self.code_digest,
) = _s3_content( ) = _s3_content(key)
key
) # type: ignore[assignment]
@property @property
def arn(self) -> str: def arn(self) -> str:
@ -755,9 +753,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
self.code_size, self.code_size,
self.code_sha_256, self.code_sha_256,
self.code_digest, self.code_digest,
) = _s3_content( ) = _s3_content(key)
key
) # type: ignore[assignment]
self.code["S3Bucket"] = updated_spec["S3Bucket"] self.code["S3Bucket"] = updated_spec["S3Bucket"]
self.code["S3Key"] = updated_spec["S3Key"] self.code["S3Key"] = updated_spec["S3Key"]
@ -1027,7 +1023,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
def create_url_config(self, config: Dict[str, Any]) -> "FunctionUrlConfig": def create_url_config(self, config: Dict[str, Any]) -> "FunctionUrlConfig":
self.url_config = FunctionUrlConfig(function=self, config=config) self.url_config = FunctionUrlConfig(function=self, config=config)
return self.url_config # type: ignore[return-value] return self.url_config
def delete_url_config(self) -> None: def delete_url_config(self) -> None:
self.url_config = None self.url_config = None
@ -1606,7 +1602,9 @@ class LambdaBackend(BaseBackend):
self._layers = LayerStorage() self._layers = LayerStorage()
@staticmethod @staticmethod
def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, str]]: # type: ignore[misc] def default_vpc_endpoint_service(
service_region: str, zones: List[str]
) -> List[Dict[str, str]]:
"""Default VPC endpoint service.""" """Default VPC endpoint service."""
return BaseBackend.default_vpc_endpoint_service_factory( return BaseBackend.default_vpc_endpoint_service_factory(
service_region, zones, "lambda" service_region, zones, "lambda"
@ -1920,7 +1918,7 @@ class LambdaBackend(BaseBackend):
] ]
} }
func = self._lambdas.get_function_by_name_or_arn(function_name, qualifier) func = self._lambdas.get_function_by_name_or_arn(function_name, qualifier)
func.invoke(json.dumps(event), {}, {}) # type: ignore[union-attr] func.invoke(json.dumps(event), {}, {})
def send_dynamodb_items( def send_dynamodb_items(
self, function_arn: str, items: List[Any], source: str self, function_arn: str, items: List[Any], source: str

View File

@ -124,7 +124,9 @@ class LambdaResponse(BaseResponse):
@amz_crc32 @amz_crc32
@amzn_request_id @amzn_request_id
def invoke(self, request: Any, full_url: str, headers: Any) -> Tuple[int, Dict[str, str], Union[str, bytes]]: # type: ignore[misc] def invoke(
self, request: Any, full_url: str, headers: Any
) -> Tuple[int, Dict[str, str], Union[str, bytes]]:
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
if request.method == "POST": if request.method == "POST":
return self._invoke(request) return self._invoke(request)
@ -133,7 +135,9 @@ class LambdaResponse(BaseResponse):
@amz_crc32 @amz_crc32
@amzn_request_id @amzn_request_id
def invoke_async(self, request: Any, full_url: str, headers: Any) -> Tuple[int, Dict[str, str], Union[str, bytes]]: # type: ignore[misc] def invoke_async(
self, request: Any, full_url: str, headers: Any
) -> Tuple[int, Dict[str, str], Union[str, bytes]]:
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
if request.method == "POST": if request.method == "POST":
return self._invoke_async() return self._invoke_async()

View File

@@ -161,7 +161,7 @@ class FakeStackSet(BaseModel):
         self.instances.create_instances(
             accounts,
             regions,
-            parameters,  # type: ignore[arg-type]
+            parameters,
             deployment_targets or {},
             permission_model=self.permission_model,
         )

View File

@@ -277,7 +277,7 @@ def resource_class_from_type(resource_type: str) -> Type[CloudFormationModel]:
         logger.warning("No Moto CloudFormation support for %s", resource_type)
         return None  # type: ignore[return-value]
-    return get_model_map()[resource_type]  # type: ignore[return-value]
+    return get_model_map()[resource_type]
 def resource_name_property_from_type(resource_type: str) -> Optional[str]:
@@ -720,11 +720,8 @@ class ResourceMap(collections_abc.Mapping):  # type: ignore[type-arg]
             instance = self[resource]
             if isinstance(instance, TaggedEC2Resource):
                 self.tags["aws:cloudformation:logical-id"] = resource
-                ec2_models.ec2_backends[self._account_id][
-                    self._region_name
-                ].create_tags(
-                    [instance.physical_resource_id], self.tags
-                )  # type: ignore[attr-defined]
+                backend = ec2_models.ec2_backends[self._account_id][self._region_name]
+                backend.create_tags([instance.physical_resource_id], self.tags)
             if instance and not instance.is_created():
                 all_resources_ready = False
         return all_resources_ready
@@ -921,7 +918,7 @@ class OutputMap(collections_abc.Mapping):  # type: ignore[type-arg]
         return iter(self.outputs)
     def __len__(self) -> int:
-        return len(self._output_json_map)  # type: ignore[arg-type]
+        return len(self._output_json_map)
     @property
     def outputs(self) -> Iterable[str]:
@@ -931,7 +928,7 @@ class OutputMap(collections_abc.Mapping):  # type: ignore[type-arg]
     def exports(self) -> List["Export"]:
         exports = []
         if self.outputs:
-            for value in self._output_json_map.values():  # type: ignore[union-attr]
+            for value in self._output_json_map.values():
                 if value.get("Export"):
                     cleaned_name = clean_json(
                         value["Export"].get("Name"), self._resource_map

View File

@ -962,7 +962,7 @@ class CognitoIdpBackend(BaseBackend):
} }
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_user_pools(self) -> List[CognitoIdpUserPool]: # type: ignore[misc] def list_user_pools(self) -> List[CognitoIdpUserPool]:
return list(self.user_pools.values()) return list(self.user_pools.values())
def describe_user_pool(self, user_pool_id: str) -> CognitoIdpUserPool: def describe_user_pool(self, user_pool_id: str) -> CognitoIdpUserPool:
@ -1035,7 +1035,9 @@ class CognitoIdpBackend(BaseBackend):
return user_pool_client return user_pool_client
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_user_pool_clients(self, user_pool_id: str) -> List[CognitoIdpUserPoolClient]: # type: ignore[misc] def list_user_pool_clients(
self, user_pool_id: str
) -> List[CognitoIdpUserPoolClient]:
user_pool = self.describe_user_pool(user_pool_id) user_pool = self.describe_user_pool(user_pool_id)
return list(user_pool.clients.values()) return list(user_pool.clients.values())
@ -1082,7 +1084,9 @@ class CognitoIdpBackend(BaseBackend):
return identity_provider return identity_provider
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_identity_providers(self, user_pool_id: str) -> List[CognitoIdpIdentityProvider]: # type: ignore[misc] def list_identity_providers(
self, user_pool_id: str
) -> List[CognitoIdpIdentityProvider]:
user_pool = self.describe_user_pool(user_pool_id) user_pool = self.describe_user_pool(user_pool_id)
return list(user_pool.identity_providers.values()) return list(user_pool.identity_providers.values())
@ -1148,7 +1152,7 @@ class CognitoIdpBackend(BaseBackend):
return user_pool.groups[group_name] return user_pool.groups[group_name]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_groups(self, user_pool_id: str) -> List[CognitoIdpGroup]: # type: ignore[misc] def list_groups(self, user_pool_id: str) -> List[CognitoIdpGroup]:
user_pool = self.describe_user_pool(user_pool_id) user_pool = self.describe_user_pool(user_pool_id)
return list(user_pool.groups.values()) return list(user_pool.groups.values())
@ -1189,7 +1193,9 @@ class CognitoIdpBackend(BaseBackend):
user.groups.add(group) user.groups.add(group)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_users_in_group(self, user_pool_id: str, group_name: str) -> List[CognitoIdpUser]: # type: ignore[misc] def list_users_in_group(
self, user_pool_id: str, group_name: str
) -> List[CognitoIdpUser]:
user_pool = self.describe_user_pool(user_pool_id) user_pool = self.describe_user_pool(user_pool_id)
group = self.get_group(user_pool_id, group_name) group = self.get_group(user_pool_id, group_name)
return list(filter(lambda user: user in group.users, user_pool.users.values())) return list(filter(lambda user: user in group.users, user_pool.users.values()))
@ -1325,7 +1331,7 @@ class CognitoIdpBackend(BaseBackend):
raise NotAuthorizedError("Invalid token") raise NotAuthorizedError("Invalid token")
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_users(self, user_pool_id: str) -> List[CognitoIdpUser]: # type: ignore[misc] def list_users(self, user_pool_id: str) -> List[CognitoIdpUser]:
user_pool = self.describe_user_pool(user_pool_id) user_pool = self.describe_user_pool(user_pool_id)
return list(user_pool.users.values()) return list(user_pool.users.values())
@ -1801,11 +1807,11 @@ class CognitoIdpBackend(BaseBackend):
if client.generate_secret: if client.generate_secret:
secret_hash: str = auth_parameters.get("SECRET_HASH") # type: ignore[assignment] secret_hash: str = auth_parameters.get("SECRET_HASH") # type: ignore[assignment]
if not check_secret_hash( if not check_secret_hash(
client.secret, client.id, username, secret_hash # type: ignore[arg-type] client.secret, client.id, username, secret_hash
): ):
raise NotAuthorizedError(secret_hash) # type: ignore[arg-type] raise NotAuthorizedError(secret_hash)
user = self.admin_get_user(user_pool.id, username) # type: ignore[arg-type] user = self.admin_get_user(user_pool.id, username)
if user.status is UserStatus.UNCONFIRMED: if user.status is UserStatus.UNCONFIRMED:
raise UserNotConfirmedException("User is not confirmed.") raise UserNotConfirmedException("User is not confirmed.")

View File

@@ -84,7 +84,7 @@ CAMEL_TO_SNAKE_REGEX = re.compile(r"(?<!^)(?=[A-Z])")
 MAX_TAGS_IN_ARG = 50
 MANAGED_RULES = load_resource(__name__, "resources/aws_managed_rules.json")
-MANAGED_RULES_CONSTRAINTS = MANAGED_RULES["ManagedRules"]  # type: ignore[index]
+MANAGED_RULES_CONSTRAINTS = MANAGED_RULES["ManagedRules"]
 def datetime2int(date: datetime) -> int:
@@ -844,12 +844,12 @@ class ConfigRule(ConfigEmptyDictable):
     def validate_managed_rule(self) -> None:
         """Validate parameters specific to managed rules."""
-        rule_info = MANAGED_RULES_CONSTRAINTS[self.source.source_identifier]  # type: ignore[index]
+        rule_info = MANAGED_RULES_CONSTRAINTS[self.source.source_identifier]
         param_names = self.input_parameters_dict.keys()
         # Verify input parameter names are actual parameters for the rule ID.
         if param_names:
-            allowed_names = {x["Name"] for x in rule_info["Parameters"]}  # type: ignore[index]
+            allowed_names = {x["Name"] for x in rule_info["Parameters"]}
             if not set(param_names).issubset(allowed_names):
                 raise InvalidParameterValueException(
                     f"Unknown parameters provided in the inputParameters: {self.input_parameters}"
@@ -857,7 +857,7 @@ class ConfigRule(ConfigEmptyDictable):
         # Verify all the required parameters are specified.
         required_names = {
-            x["Name"] for x in rule_info["Parameters"] if not x["Optional"]  # type: ignore[index]
+            x["Name"] for x in rule_info["Parameters"] if not x["Optional"]
         }
         diffs = required_names.difference(set(param_names))
         if diffs:
@@ -938,12 +938,12 @@ class ConfigBackend(BaseBackend):
         # Verify that each entry exists in the supported list:
         bad_list = []
         for resource in resource_list:
-            if resource not in self.config_schema.shapes["ResourceType"]["enum"]:  # type: ignore[index]
+            if resource not in self.config_schema.shapes["ResourceType"]["enum"]:
                 bad_list.append(resource)
         if bad_list:
             raise InvalidResourceTypeException(
-                bad_list, self.config_schema.shapes["ResourceType"]["enum"]  # type: ignore[index]
+                bad_list, self.config_schema.shapes["ResourceType"]["enum"]
             )
     def _validate_delivery_snapshot_properties(
@@ -957,11 +957,11 @@ class ConfigBackend(BaseBackend):
         # Verify that the deliveryFrequency is set to an acceptable value:
         if (
             properties.get("deliveryFrequency", None)
-            not in self.config_schema.shapes["MaximumExecutionFrequency"]["enum"]  # type: ignore[index]
+            not in self.config_schema.shapes["MaximumExecutionFrequency"]["enum"]
         ):
             raise InvalidDeliveryFrequency(
                 properties.get("deliveryFrequency", None),
-                self.config_schema.shapes["MaximumExecutionFrequency"]["enum"],  # type: ignore[index]
+                self.config_schema.shapes["MaximumExecutionFrequency"]["enum"],
             )
     def put_configuration_aggregator(

View File

@@ -240,7 +240,7 @@ class AccountSpecificBackend(Dict[str, SERVICE_BACKEND]):
         super().__setitem__(key, value)
     @lru_cache()
-    def __getitem__(self, region_name: str) -> SERVICE_BACKEND:  # type: ignore[override]
+    def __getitem__(self, region_name: str) -> SERVICE_BACKEND:
         if region_name in self.keys():
             return super().__getitem__(region_name)
         # Create the backend for a specific region

View File

@@ -12,7 +12,7 @@ from moto.utilities.distutils_version import LooseVersion
 try:
     from importlib.metadata import version
 except ImportError:
-    from importlib_metadata import version  # type: ignore[no-redef]
+    from importlib_metadata import version
 RESPONSES_VERSION = version("responses")
@@ -163,7 +163,7 @@ def get_response_mock() -> responses.RequestsMock:
         )
     else:
         responses_mock = responses.RequestsMock(assert_all_requests_are_fired=False)
-    responses_mock._find_match = types.MethodType(_find_first_match, responses_mock)  # type: ignore[assignment]
+    responses_mock._find_match = types.MethodType(_find_first_match, responses_mock)  # type: ignore[method-assign]
     responses_mock.add_passthru("http")
     return responses_mock

View File

@@ -64,7 +64,7 @@ class RESTError(HTTPException):
         if template in self.templates.keys():
             env = Environment(loader=DictLoader(self.templates))
-            self.description: str = env.get_template(template).render(  # type: ignore
+            self.description: str = env.get_template(template).render(
                 error_type=error_type,
                 message=message,
                 request_id_tag=self.request_id_tag_name,
@@ -88,7 +88,7 @@ class RESTError(HTTPException):
     def get_body(
         self, *args: Any, **kwargs: Any  # pylint: disable=unused-argument
     ) -> str:
-        return self.description  # type: ignore[return-value]
+        return self.description
     def to_json(self) -> "JsonRESTError":
         err = JsonRESTError(error_type=self.error_type, message=self.message)

View File

@@ -271,9 +271,9 @@ def patch_client(client: botocore.client.BaseClient) -> None:
     if isinstance(client, botocore.client.BaseClient):
         # Check if our event handler was already registered
        try:
-            event_emitter = client._ruleset_resolver._event_emitter._emitter  # type: ignore
-            all_handlers = event_emitter._handlers._root["children"]  # type: ignore
-            handler_trie = list(all_handlers["before-send"].values())[1]  # type: ignore
+            event_emitter = client._ruleset_resolver._event_emitter._emitter
+            all_handlers = event_emitter._handlers._root["children"]
+            handler_trie = list(all_handlers["before-send"].values())[1]
             handlers_list = handler_trie.first + handler_trie.middle + handler_trie.last
             if botocore_stubber in handlers_list:
                 # No need to patch - this client already has the botocore_stubber registered

View File

@@ -604,7 +604,7 @@ class BaseResponse(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
                 if len(parts) != 2 or parts[1] != "member":
                     value_dict[parts[0]] = value_dict.pop(k)
         else:
-            value_dict = list(value_dict.values())[0]  # type: ignore[assignment]
+            value_dict = list(value_dict.values())[0]
         return value_dict

View File

@ -135,7 +135,9 @@ class DataBrewBackend(BaseBackend):
recipe.update(recipe_description, recipe_steps) recipe.update(recipe_description, recipe_steps)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_recipes(self, recipe_version: Optional[str] = None) -> List["FakeRecipeVersion"]: # type: ignore[misc] def list_recipes(
self, recipe_version: Optional[str] = None
) -> List["FakeRecipeVersion"]:
# https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipes.html # https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipes.html
if recipe_version == FakeRecipe.LATEST_WORKING: if recipe_version == FakeRecipe.LATEST_WORKING:
version = "latest_working" version = "latest_working"
@ -150,7 +152,7 @@ class DataBrewBackend(BaseBackend):
return [r for r in recipes if r is not None] return [r for r in recipes if r is not None]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_recipe_versions(self, recipe_name: str) -> List["FakeRecipeVersion"]: # type: ignore[misc] def list_recipe_versions(self, recipe_name: str) -> List["FakeRecipeVersion"]:
# https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipeVersions.html # https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipeVersions.html
self.validate_length(recipe_name, "name", 255) self.validate_length(recipe_name, "name", 255)
@ -254,7 +256,7 @@ class DataBrewBackend(BaseBackend):
return self.rulesets[ruleset_name] return self.rulesets[ruleset_name]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_rulesets(self) -> List["FakeRuleset"]: # type: ignore[misc] def list_rulesets(self) -> List["FakeRuleset"]:
return list(self.rulesets.values()) return list(self.rulesets.values())
def delete_ruleset(self, ruleset_name: str) -> None: def delete_ruleset(self, ruleset_name: str) -> None:
@ -289,7 +291,7 @@ class DataBrewBackend(BaseBackend):
return dataset return dataset
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_datasets(self) -> List["FakeDataset"]: # type: ignore[misc] def list_datasets(self) -> List["FakeDataset"]:
return list(self.datasets.values()) return list(self.datasets.values())
def update_dataset( def update_dataset(
@ -406,7 +408,9 @@ class DataBrewBackend(BaseBackend):
return self.update_job(**kwargs) return self.update_job(**kwargs)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_jobs(self, dataset_name: Optional[str] = None, project_name: Optional[str] = None) -> List["FakeJob"]: # type: ignore[misc] def list_jobs(
self, dataset_name: Optional[str] = None, project_name: Optional[str] = None
) -> List["FakeJob"]:
# https://docs.aws.amazon.com/databrew/latest/dg/API_ListJobs.html # https://docs.aws.amazon.com/databrew/latest/dg/API_ListJobs.html
if dataset_name is not None: if dataset_name is not None:
self.validate_length(dataset_name, "datasetName", 255) self.validate_length(dataset_name, "datasetName", 255)
@ -473,7 +477,7 @@ class FakeRecipe(BaseModel):
def publish(self, description: Optional[str] = None) -> None: def publish(self, description: Optional[str] = None) -> None:
self.latest_published = self.latest_working self.latest_published = self.latest_working
self.latest_working = deepcopy(self.latest_working) self.latest_working = deepcopy(self.latest_working)
self.latest_published.publish(description) # type: ignore[attr-defined] self.latest_published.publish(description)
del self.versions[self.latest_working.version] del self.versions[self.latest_working.version]
self.versions[self.latest_published.version] = self.latest_published self.versions[self.latest_published.version] = self.latest_published
self.latest_working.version = self.latest_published.version + 0.1 self.latest_working.version = self.latest_published.version + 0.1
@ -626,7 +630,7 @@ class FakeJob(BaseModel, metaclass=BaseModelABCMeta): # type: ignore[misc]
@property @property
@abstractmethod @abstractmethod
def local_attrs(self) -> List[str]: # type: ignore[misc] def local_attrs(self) -> List[str]:
raise NotImplementedError raise NotImplementedError
def __init__(self, account_id: str, region_name: str, **kwargs: Any): def __init__(self, account_id: str, region_name: str, **kwargs: Any):

View File

@ -36,7 +36,7 @@ class DataBrewResponse(BaseResponse):
) )
@amzn_request_id @amzn_request_id
def delete_recipe_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return,misc] def delete_recipe_version(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
# https://docs.aws.amazon.com/databrew/latest/dg/API_DeleteRecipeVersion.html # https://docs.aws.amazon.com/databrew/latest/dg/API_DeleteRecipeVersion.html
if request.method == "DELETE": if request.method == "DELETE":
@ -76,7 +76,7 @@ class DataBrewResponse(BaseResponse):
) )
@amzn_request_id @amzn_request_id
def list_recipe_versions(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[return,misc] def list_recipe_versions(self, request: Any, full_url: str, headers: Any) -> str:
# https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipeVersions.html # https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipeVersions.html
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
recipe_name = self._get_param("Name", self._get_param("name")) recipe_name = self._get_param("Name", self._get_param("name"))
@ -97,7 +97,7 @@ class DataBrewResponse(BaseResponse):
) )
@amzn_request_id @amzn_request_id
def publish_recipe(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return,misc] def publish_recipe(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
if request.method == "POST": if request.method == "POST":
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -126,7 +126,7 @@ class DataBrewResponse(BaseResponse):
return 200, {}, json.dumps(recipe.as_dict()) return 200, {}, json.dumps(recipe.as_dict())
@amzn_request_id @amzn_request_id
def recipe_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[misc,return] def recipe_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -178,7 +178,7 @@ class DataBrewResponse(BaseResponse):
return 200, {}, json.dumps({"Name": ruleset_name}) return 200, {}, json.dumps({"Name": ruleset_name})
@amzn_request_id @amzn_request_id
def ruleset_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[misc,return] def ruleset_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -283,7 +283,7 @@ class DataBrewResponse(BaseResponse):
return 200, {}, json.dumps(dataset.as_dict()) return 200, {}, json.dumps(dataset.as_dict())
@amzn_request_id @amzn_request_id
def dataset_response(self, request: Any, full_url: str, headers: Any) -> Union[str, TYPE_RESPONSE]: # type: ignore[misc,return] def dataset_response(self, request: Any, full_url: str, headers: Any) -> Union[str, TYPE_RESPONSE]: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -304,7 +304,7 @@ class DataBrewResponse(BaseResponse):
# region Jobs # region Jobs
@amzn_request_id @amzn_request_id
def list_jobs(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[misc,return] def list_jobs(self, request: Any, full_url: str, headers: Any) -> str:
# https://docs.aws.amazon.com/databrew/latest/dg/API_ListJobs.html # https://docs.aws.amazon.com/databrew/latest/dg/API_ListJobs.html
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
dataset_name = self._get_param("datasetName") dataset_name = self._get_param("datasetName")
@ -337,7 +337,7 @@ class DataBrewResponse(BaseResponse):
return 200, {}, json.dumps({"Name": job_name}) return 200, {}, json.dumps({"Name": job_name})
@amzn_request_id @amzn_request_id
def job_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[misc,return] def job_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -428,7 +428,7 @@ class DataBrewResponse(BaseResponse):
return json.dumps(self.databrew_backend.update_recipe_job(**kwargs).as_dict()) return json.dumps(self.databrew_backend.update_recipe_job(**kwargs).as_dict())
@amzn_request_id @amzn_request_id
def profile_job_response(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[misc,return] def profile_job_response(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)
@ -438,7 +438,7 @@ class DataBrewResponse(BaseResponse):
return self.update_profile_job_response(job_name) return self.update_profile_job_response(job_name)
@amzn_request_id @amzn_request_id
def recipe_job_response(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[misc,return] def recipe_job_response(self, request: Any, full_url: str, headers: Any) -> str: # type: ignore[return]
self.setup_class(request, full_url, headers) self.setup_class(request, full_url, headers)
parsed_url = urlparse(full_url) parsed_url = urlparse(full_url)

View File

@@ -216,7 +216,7 @@ class DAXBackend(BaseBackend):
         return self.clusters[cluster_name]
     @paginate(PAGINATION_MODEL)  # type: ignore[misc]
-    def describe_clusters(self, cluster_names: Iterable[str]) -> List[DaxCluster]:  # type: ignore[misc]
+    def describe_clusters(self, cluster_names: Iterable[str]) -> List[DaxCluster]:
         clusters = self.clusters
         if not cluster_names:
             cluster_names = clusters.keys()

View File

@@ -477,7 +477,9 @@ class DirectoryServiceBackend(BaseBackend):
         directory.enable_sso(True)
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore[misc]
-    def describe_directories(self, directory_ids: Optional[List[str]] = None) -> List[Directory]:  # type: ignore[misc]
+    def describe_directories(
+        self, directory_ids: Optional[List[str]] = None
+    ) -> List[Directory]:
         """Return info on all directories or directories with matching IDs."""
         for directory_id in directory_ids or self.directories:
             self._validate_directory_id(directory_id)
@@ -531,7 +533,7 @@ class DirectoryServiceBackend(BaseBackend):
         self.tagger.untag_resource_using_names(resource_id, tag_keys)
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore[misc]
-    def list_tags_for_resource(self, resource_id: str) -> List[Dict[str, str]]:  # type: ignore[misc]
+    def list_tags_for_resource(self, resource_id: str) -> List[Dict[str, str]]:
         """List all tags on a directory."""
         self._validate_directory_id(resource_id)
         return self.tagger.list_tags_for_resource(resource_id).get("Tags")  # type: ignore[return-value]

View File

@@ -154,7 +154,7 @@ class DirectoryServiceResponse(BaseResponse):
         next_token = self._get_param("NextToken")
         limit = self._get_param("Limit")
         try:
-            (tags, next_token) = self.ds_backend.list_tags_for_resource(
+            tags, next_token = self.ds_backend.list_tags_for_resource(
                 resource_id=resource_id, next_token=next_token, limit=limit
             )
         except InvalidToken as exc:

View File

@@ -89,7 +89,7 @@ class Op:
         self.lhs = lhs
         self.rhs = rhs
-    def expr(self, item: Optional[Item]) -> bool:  # type: ignore
+    def expr(self, item: Optional[Item]) -> bool:
         raise NotImplementedError(f"Expr not defined for {type(self)}")
     def __repr__(self) -> str:
@@ -862,10 +862,10 @@ class ConditionExpressionParser:
 class Operand:
-    def expr(self, item: Optional[Item]) -> Any:  # type: ignore
+    def expr(self, item: Optional[Item]) -> Any:
         raise NotImplementedError
-    def get_type(self, item: Optional[Item]) -> Optional[str]:  # type: ignore
+    def get_type(self, item: Optional[Item]) -> Optional[str]:
         raise NotImplementedError

View File

@@ -494,7 +494,7 @@ class DynamoDBBackend(BaseBackend):
             expression_attribute_names,
             expression_attribute_values,
         )
-        if not condition_op.expr(item):  # type: ignore[arg-type]
+        if not condition_op.expr(item):
             raise ConditionalCheckFailed
         return table.delete_item(hash_value, range_value)
@@ -565,7 +565,7 @@ class DynamoDBBackend(BaseBackend):
                 expression_attribute_names,
                 expression_attribute_values,
             )
-            if not condition_op.expr(current):  # type: ignore[arg-type]
+            if not condition_op.expr(current):
                 raise ConditionalCheckFailed()
         elif "Put" in item:
             item = item["Put"]

View File

@@ -320,7 +320,7 @@ class Table(CloudFormationModel):
     def has_cfn_attr(cls, attr: str) -> bool:
         return attr in ["Arn", "StreamArn"]
-    def get_cfn_attribute(self, attribute_name: str) -> Any:  # type: ignore[misc]
+    def get_cfn_attribute(self, attribute_name: str) -> Any:
         from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
         if attribute_name == "Arn":

View File

@@ -51,7 +51,7 @@ class NodeExecutor:
         if len(path_nodes) == 0:
             return item.attrs
         else:
-            return ExpressionPathResolver(  # type: ignore
+            return ExpressionPathResolver(
                 self.expression_attribute_names
             ).resolve_expression_path_nodes_to_dynamo_type(item, path_nodes)

View File

@@ -40,7 +40,7 @@ class ShardIterator(BaseModel):
         return {"ShardIterator": self.arn}
     def get(self, limit: int = 1000) -> Dict[str, Any]:
-        items = self.stream_shard.get(self.sequence_number, limit)  # type: ignore[no-untyped-call]
+        items = self.stream_shard.get(self.sequence_number, limit)
         try:
             last_sequence_number = max(
                 int(i["dynamodb"]["SequenceNumber"]) for i in items
@@ -76,7 +76,7 @@ class DynamoDBStreamsBackend(BaseBackend):
     def _get_table_from_arn(self, arn: str) -> Table:
         table_name = arn.split(":", 6)[5].split("/")[1]
-        return self.dynamodb.get_table(table_name)  # type: ignore[no-untyped-call]
+        return self.dynamodb.get_table(table_name)
     def describe_stream(self, arn: str) -> str:
         table = self._get_table_from_arn(arn)

View File

@@ -489,7 +489,7 @@ class EBSBackend:
         return snapshot
     def delete_snapshot(self, snapshot_id: str) -> Snapshot:
-        if snapshot_id in self.snapshots:  # type: ignore[attr-defined]
+        if snapshot_id in self.snapshots:
             snapshot = self.snapshots[snapshot_id]
             if snapshot.from_ami and snapshot.from_ami in self.amis:  # type: ignore[attr-defined]
                 raise InvalidSnapshotInUse(snapshot_id, snapshot.from_ami)
@@ -497,17 +497,17 @@ class EBSBackend:
         raise InvalidSnapshotIdError()
     def get_create_volume_permission_groups(self, snapshot_id: str) -> Set[str]:
-        snapshot = self.get_snapshot(snapshot_id)  # type: ignore[attr-defined]
+        snapshot = self.get_snapshot(snapshot_id)
         return snapshot.create_volume_permission_groups
     def get_create_volume_permission_userids(self, snapshot_id: str) -> Set[str]:
-        snapshot = self.get_snapshot(snapshot_id)  # type: ignore[attr-defined]
+        snapshot = self.get_snapshot(snapshot_id)
         return snapshot.create_volume_permission_userids
     def add_create_volume_permission(
         self, snapshot_id: str, user_ids: List[str], groups: List[str]
     ) -> None:
-        snapshot = self.get_snapshot(snapshot_id)  # type: ignore[attr-defined]
+        snapshot = self.get_snapshot(snapshot_id)
         if user_ids:
             snapshot.create_volume_permission_userids.update(user_ids)
@@ -522,7 +522,7 @@ class EBSBackend:
         user_ids: Optional[List[str]] = None,
         groups: Optional[Iterable[str]] = None,
     ) -> None:
-        snapshot = self.get_snapshot(snapshot_id)  # type: ignore[attr-defined]
+        snapshot = self.get_snapshot(snapshot_id)
         if user_ids:
             snapshot.create_volume_permission_userids.difference_update(user_ids)

View File

@@ -253,7 +253,7 @@ class NetworkInterface(TaggedEC2Resource, CloudFormationModel):
         elif filter_name == "description":
             return self.description
         elif filter_name == "attachment.instance-id":
-            return self.instance.id if self.instance else None  # type: ignore[attr-defined]
+            return self.instance.id if self.instance else None
         elif filter_name == "attachment.instance-owner-id":
             return self.owner_id
         else:

View File

@@ -129,7 +129,7 @@ class Fleet(TaggedEC2Resource):
     def create_spot_requests(self, weight_to_add: float) -> List[SpotInstanceRequest]:
         weight_map, added_weight = self.get_launch_spec_counts(weight_to_add)
         for launch_spec, count in weight_map.items():
-            requests = self.ec2_backend.request_spot_instances(  # type: ignore[attr-defined]
+            requests = self.ec2_backend.request_spot_instances(
                 price=launch_spec.spot_price,
                 image_id=launch_spec.image_id,
                 count=count,
@@ -157,7 +157,7 @@ class Fleet(TaggedEC2Resource):
     def create_on_demand_requests(self, weight_to_add: float) -> None:
         weight_map, added_weight = self.get_launch_spec_counts(weight_to_add)
         for launch_spec, count in weight_map.items():
-            reservation = self.ec2_backend.add_instances(  # type: ignore[attr-defined]
+            reservation = self.ec2_backend.add_instances(
                 image_id=launch_spec.image_id,
                 count=count,
                 instance_type=launch_spec.instance_type,

View File

@@ -20,7 +20,7 @@ for _location_type in listdir(root / offerings_path):
         full_path = offerings_path + "/" + _location_type + "/" + _region
         res = load_resource(__name__, full_path)
         for instance in res:
-            instance["LocationType"] = _location_type  # type: ignore
+            instance["LocationType"] = _location_type
         INSTANCE_TYPE_OFFERINGS[_location_type][_region.replace(".json", "")] = res

View File

@@ -95,7 +95,7 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
             template_version = ec2_backend._get_template_from_args(launch_template_arg)
             self.image_id = template_version.image_id
         else:
-            self.image_id = image_id  # type: ignore
+            self.image_id = image_id
         # Check if we have tags to process
         if launch_template_arg:
             template_version = ec2_backend._get_template_from_args(launch_template_arg)
@@ -214,7 +214,7 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
         kms_key_id: Optional[str],
         volume_type: Optional[str],
     ) -> None:
-        volume = self.ec2_backend.create_volume(  # type: ignore[attr-defined]
+        volume = self.ec2_backend.create_volume(
             size=size,
             zone_name=self._placement.zone,
             snapshot_id=snapshot_id,
@@ -721,7 +721,7 @@ class InstanceBackend:
                     kms_key_id = block_device["Ebs"].get("KmsKeyId")
                     if block_device.get("NoDevice") != "":
-                        new_instance.add_block_device(  # type: ignore[attr-defined]
+                        new_instance.add_block_device(
                             volume_size,
                             device_name,
                             snapshot_id,

View File

@@ -1073,7 +1073,7 @@ class SecurityGroupBackend:
                 ip_protocol,
                 from_port,
                 to_port,
-                ip_ranges,  # type: ignore[arg-type]
+                ip_ranges,
                 _source_groups,
                 prefix_list_ids,
             )

View File

@@ -377,7 +377,7 @@ class SpotFleetRequest(TaggedEC2Resource, CloudFormationModel):
         self.spot_requests = [
             req for req in self.spot_requests if req.instance.id not in instance_ids
         ]
-        self.ec2_backend.terminate_instances(instance_ids)  # type: ignore[attr-defined]
+        self.ec2_backend.terminate_instances(instance_ids)
 class SpotRequestBackend:

View File

@@ -11,7 +11,7 @@ class AmisResponse(EC2BaseResponse):
         self.error_on_dryrun()
-        image = self.ec2_backend.create_image(  # type: ignore[attr-defined]
+        image = self.ec2_backend.create_image(
             instance_id,
             name,
             description,

View File

@@ -1062,7 +1062,7 @@ class EC2ContainerServiceBackend(BaseBackend):
     def delete_capacity_provider(self, name_or_arn: str) -> CapacityProvider:
         provider: CapacityProvider = self._get_provider(name_or_arn)  # type: ignore[assignment]
         self.capacity_providers.pop(provider.name)
-        return provider  # type: ignore[return-value]
+        return provider
     def update_capacity_provider(
         self, name_or_arn: str, asg_provider: Dict[str, Any]
@@ -1975,7 +1975,7 @@ class EC2ContainerServiceBackend(BaseBackend):
                 yield task_fam
     @staticmethod
-    def _parse_resource_arn(resource_arn: str) -> Dict[str, str]:  # type: ignore[misc]
+    def _parse_resource_arn(resource_arn: str) -> Dict[str, str]:
         regexes = [
             "^arn:aws:ecs:(?P<region>[^:]+):(?P<account_id>[^:]+):(?P<service>[^:]+)/(?P<cluster_id>[^:]+)/(?P<service_id>[^:]+)/ecs-svc/(?P<id>.*)$",
             "^arn:aws:ecs:(?P<region>[^:]+):(?P<account_id>[^:]+):(?P<service>[^:]+)/(?P<cluster_id>[^:]+)/(?P<id>.*)$",

View File

@@ -270,7 +270,7 @@ class ELBResponse(BaseResponse):
         if mb_backend:
             policies = self._get_multi_param("PolicyNames.member")
             self.elb_backend.set_load_balancer_policies_of_backend_server(
-                load_balancer_name, instance_port, policies  # type: ignore[arg-type]
+                load_balancer_name, instance_port, policies
             )
         # else: explode?

View File

@@ -644,7 +644,7 @@ class ElasticMapReduceBackend(BaseBackend):
             from moto.ec2.exceptions import InvalidSubnetIdError
             try:
-                subnet = self.ec2_backend.get_subnet(ec2_subnet_id)  # type: ignore
+                subnet = self.ec2_backend.get_subnet(ec2_subnet_id)
             except InvalidSubnetIdError:
                 warnings.warn(
                     f"Could not find Subnet with id: {ec2_subnet_id}\n"

View File

@@ -1134,7 +1134,9 @@ class EventsBackend(BaseBackend):
         return False
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore[misc]
-    def list_rule_names_by_target(self, target_arn: str, event_bus_arn: Optional[str]) -> List[Rule]:  # type: ignore[misc]
+    def list_rule_names_by_target(
+        self, target_arn: str, event_bus_arn: Optional[str]
+    ) -> List[Rule]:
         event_bus_name = self._normalize_event_bus_arn(event_bus_arn)
         event_bus = self._get_event_bus(event_bus_name)
         matching_rules = []
@@ -1147,7 +1149,9 @@ class EventsBackend(BaseBackend):
         return matching_rules
     @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore[misc]
-    def list_rules(self, prefix: Optional[str] = None, event_bus_arn: Optional[str] = None) -> List[Rule]:  # type: ignore[misc]
+    def list_rules(
+        self, prefix: Optional[str] = None, event_bus_arn: Optional[str] = None
+    ) -> List[Rule]:
         event_bus_name = self._normalize_event_bus_arn(event_bus_arn)
         event_bus = self._get_event_bus(event_bus_name)
         match_string = ".*"
@@ -1497,8 +1501,8 @@ class EventsBackend(BaseBackend):
         name = arn.split("/")[-1]
         rules = [bus.rules for bus in self.event_buses.values()]
         for registry in rules + [self.event_buses]:
-            if name in registry:  # type: ignore
-                return self.tagger.list_tags_for_resource(registry[name].arn)  # type: ignore
+            if name in registry:
+                return self.tagger.list_tags_for_resource(registry[name].arn)
         raise ResourceNotFoundException(
             f"Rule {name} does not exist on EventBus default."
         )
@@ -1507,8 +1511,8 @@ class EventsBackend(BaseBackend):
         name = arn.split("/")[-1]
         rules = [bus.rules for bus in self.event_buses.values()]
         for registry in rules + [self.event_buses]:
-            if name in registry:  # type: ignore
-                self.tagger.tag_resource(registry[name].arn, tags)  # type: ignore
+            if name in registry:
+                self.tagger.tag_resource(registry[name].arn, tags)
                 return
         raise ResourceNotFoundException(
             f"Rule {name} does not exist on EventBus default."
@@ -1518,8 +1522,8 @@ class EventsBackend(BaseBackend):
         name = arn.split("/")[-1]
         rules = [bus.rules for bus in self.event_buses.values()]
         for registry in rules + [self.event_buses]:
-            if name in registry:  # type: ignore
-                self.tagger.untag_resource_using_names(registry[name].arn, tag_names)  # type: ignore
+            if name in registry:
+                self.tagger.untag_resource_using_names(registry[name].arn, tag_names)
                 return
         raise ResourceNotFoundException(
             f"Rule {name} does not exist on EventBus default."

View File

@ -331,7 +331,7 @@ class GlueBackend(BaseBackend):
return [self.crawlers[key] for key in self.crawlers] if self.crawlers else [] return [self.crawlers[key] for key in self.crawlers] if self.crawlers else []
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_crawlers(self) -> List["FakeCrawler"]: # type: ignore[misc] def list_crawlers(self) -> List["FakeCrawler"]:
return [crawler for _, crawler in self.crawlers.items()] return [crawler for _, crawler in self.crawlers.items()]
def start_crawler(self, name: str) -> None: def start_crawler(self, name: str) -> None:
@ -406,7 +406,7 @@ class GlueBackend(BaseBackend):
raise JobNotFoundException(name) raise JobNotFoundException(name)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def get_jobs(self) -> List["FakeJob"]: # type: ignore def get_jobs(self) -> List["FakeJob"]:
return [job for _, job in self.jobs.items()] return [job for _, job in self.jobs.items()]
def start_job_run(self, name: str) -> str: def start_job_run(self, name: str) -> str:
@ -418,7 +418,7 @@ class GlueBackend(BaseBackend):
return job.get_job_run(run_id) return job.get_job_run(run_id)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_jobs(self) -> List["FakeJob"]: # type: ignore def list_jobs(self) -> List["FakeJob"]:
return [job for _, job in self.jobs.items()] return [job for _, job in self.jobs.items()]
def delete_job(self, name: str) -> None: def delete_job(self, name: str) -> None:
@ -829,7 +829,7 @@ class GlueBackend(BaseBackend):
raise SessionNotFoundException(session_id) raise SessionNotFoundException(session_id)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_sessions(self) -> List["FakeSession"]: # type: ignore[misc] def list_sessions(self) -> List["FakeSession"]:
return [session for _, session in self.sessions.items()] return [session for _, session in self.sessions.items()]
def stop_session(self, session_id: str) -> None: def stop_session(self, session_id: str) -> None:
@ -884,7 +884,7 @@ class GlueBackend(BaseBackend):
trigger.stop_trigger() trigger.stop_trigger()
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def get_triggers(self, dependent_job_name: str) -> List["FakeTrigger"]: # type: ignore def get_triggers(self, dependent_job_name: str) -> List["FakeTrigger"]:
if dependent_job_name: if dependent_job_name:
triggers = [] triggers = []
for trigger in self.triggers.values(): for trigger in self.triggers.values():
@ -898,7 +898,7 @@ class GlueBackend(BaseBackend):
return list(self.triggers.values()) return list(self.triggers.values())
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_triggers(self, dependent_job_name: str) -> List["FakeTrigger"]: # type: ignore def list_triggers(self, dependent_job_name: str) -> List["FakeTrigger"]:
if dependent_job_name: if dependent_job_name:
triggers = [] triggers = []
for trigger in self.triggers.values(): for trigger in self.triggers.values():
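
The pattern repeated across these Glue methods (and most backends below): `@paginate(...)` is, from mypy's point of view, an untyped decorator, so the ignore on the decorator line still suppresses a real error, while a second ignore on the decorated signature no longer suppresses anything. A reduced sketch under that assumption, using a stand-in decorator rather than moto's real paginator:

from typing import List

def paginate(pagination_model):
    # unannotated stand-in for moto's paginate decorator
    def wrap(func):
        return func
    return wrap

class FakeBackend:
    def __init__(self) -> None:
        self.jobs: List[str] = ["job-a", "job-b"]

    # with disallow_untyped_decorators, mypy reports
    #   Untyped decorator makes function "get_jobs" untyped  [misc]
    # on this line, so this ignore has to stay:
    @paginate(pagination_model={})  # type: ignore[misc]
    def get_jobs(self) -> List[str]:
        # a second ignore on the signature would itself be flagged once
        # warn_unused_ignores is on, hence the removals in this commit
        return list(self.jobs)

print(FakeBackend().get_jobs())  # ['job-a', 'job-b']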

View File

@ -117,7 +117,7 @@ class IAMUserAccessKey:
class AssumedRoleAccessKey: class AssumedRoleAccessKey:
@property @property
def backend(self) -> IAMBackend: # type: ignore[misc] def backend(self) -> IAMBackend:
return iam_backends[self.account_id]["global"] return iam_backends[self.account_id]["global"]
def __init__(self, account_id: str, access_key_id: str, headers: Dict[str, str]): def __init__(self, account_id: str, access_key_id: str, headers: Dict[str, str]):
@ -351,7 +351,7 @@ class IAMPolicy:
else: else:
policy_document = policy["policy_document"] policy_document = policy["policy_document"]
self._policy_json = json.loads(policy_document) # type: ignore[arg-type] self._policy_json = json.loads(policy_document)
def is_action_permitted( def is_action_permitted(
self, action: str, resource: str = "*" self, action: str, resource: str = "*"

View File

@ -2268,7 +2268,7 @@ class IAMBackend(BaseBackend):
ref_key = key.lower() ref_key = key.lower()
self._validate_tag_key(key, exception_param="tagKeys") self._validate_tag_key(key, exception_param="tagKeys")
policy.tags.pop(ref_key, None) # type: ignore[union-attr] policy.tags.pop(ref_key, None)
def create_policy_version( def create_policy_version(
self, policy_arn: str, policy_document: str, set_as_default: str self, policy_arn: str, policy_document: str, set_as_default: str

View File

@ -215,10 +215,8 @@ class IdentityStoreBackend(BaseBackend):
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore @paginate(pagination_model=PAGINATION_MODEL) # type: ignore
def list_group_memberships( def list_group_memberships(
self, self, identity_store_id: str, group_id: str
identity_store_id: str, ) -> List[Any]:
group_id: str,
) -> List[Any]: # type: ignore
identity_store = self.__get_identity_store(identity_store_id) identity_store = self.__get_identity_store(identity_store_id)
return [ return [

View File

@ -1173,7 +1173,7 @@ class IoTBackend(BaseBackend):
if version.version_id == version_id: if version.version_id == version_id:
version.is_default = True version.is_default = True
policy.default_version_id = version.version_id policy.default_version_id = version.version_id
policy.document = version.document # type: ignore policy.document = version.document
else: else:
version.is_default = False version.is_default = False
@ -1729,7 +1729,9 @@ class IoTBackend(BaseBackend):
return job_executions, next_token return job_executions, next_token
@paginate(PAGINATION_MODEL) # type: ignore[misc] @paginate(PAGINATION_MODEL) # type: ignore[misc]
def list_job_executions_for_thing(self, thing_name: str, status: Optional[str]) -> List[Dict[str, Any]]: # type: ignore[misc] def list_job_executions_for_thing(
self, thing_name: str, status: Optional[str]
) -> List[Dict[str, Any]]:
job_executions = [ job_executions = [
self.job_executions[je].to_dict() self.job_executions[je].to_dict()
for je in self.job_executions for je in self.job_executions

View File

@ -321,7 +321,7 @@ class Stream(CloudFormationModel):
) )
def update_shard_count(self, target_shard_count: int) -> None: def update_shard_count(self, target_shard_count: int) -> None:
if self.stream_mode.get("StreamMode", "") == "ON_DEMAND": # type: ignore if self.stream_mode.get("StreamMode", "") == "ON_DEMAND":
raise StreamCannotBeUpdatedError( raise StreamCannotBeUpdatedError(
stream_name=self.stream_name, account_id=self.account_id stream_name=self.stream_name, account_id=self.account_id
) )
@ -784,7 +784,9 @@ class KinesisBackend(BaseBackend):
return current_shard_count return current_shard_count
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_shards(self, stream_arn: Optional[str], stream_name: Optional[str]) -> List[Dict[str, Any]]: # type: ignore def list_shards(
self, stream_arn: Optional[str], stream_name: Optional[str]
) -> List[Dict[str, Any]]:
stream = self.describe_stream(stream_arn=stream_arn, stream_name=stream_name) stream = self.describe_stream(stream_arn=stream_arn, stream_name=stream_name)
shards = sorted(stream.shards.values(), key=lambda x: x.shard_id) shards = sorted(stream.shards.values(), key=lambda x: x.shard_id)
return [shard.to_json() for shard in shards] return [shard.to_json() for shard in shards]

View File

@ -21,7 +21,7 @@ class KmsResponse(BaseResponse):
def __init__(self) -> None: def __init__(self) -> None:
super().__init__(service_name="kms") super().__init__(service_name="kms")
def _get_param(self, param_name: str, if_none: Any = None) -> Any: # type: ignore def _get_param(self, param_name: str, if_none: Any = None) -> Any:
params = json.loads(self.body) params = json.loads(self.body)
for key in ("Plaintext", "CiphertextBlob"): for key in ("Plaintext", "CiphertextBlob"):

View File

@ -172,7 +172,7 @@ class LogStream(BaseModel):
self.filter_name, self.filter_name,
log_group_name, log_group_name,
log_stream_name, log_stream_name,
formatted_log_events, # type: ignore formatted_log_events,
) )
elif service == "firehose": elif service == "firehose":
from moto.firehose import firehose_backends from moto.firehose import firehose_backends
@ -182,7 +182,7 @@ class LogStream(BaseModel):
self.filter_name, self.filter_name,
log_group_name, log_group_name,
log_stream_name, log_stream_name,
formatted_log_events, # type: ignore formatted_log_events,
) )
elif service == "kinesis": elif service == "kinesis":
from moto.kinesis import kinesis_backends from moto.kinesis import kinesis_backends
@ -193,7 +193,7 @@ class LogStream(BaseModel):
self.filter_name, self.filter_name,
log_group_name, log_group_name,
log_stream_name, log_stream_name,
formatted_log_events, # type: ignore formatted_log_events,
) )
return f"{self.upload_sequence_token:056d}" return f"{self.upload_sequence_token:056d}"
@ -725,7 +725,9 @@ class LogsBackend(BaseBackend):
del self.groups[log_group_name] del self.groups[log_group_name]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def describe_log_groups(self, log_group_name_prefix: Optional[str] = None) -> List[Dict[str, Any]]: # type: ignore[misc] def describe_log_groups(
self, log_group_name_prefix: Optional[str] = None
) -> List[Dict[str, Any]]:
groups = [ groups = [
group.to_describe_dict() group.to_describe_dict()
for name, group in self.groups.items() for name, group in self.groups.items()

View File

@ -41,7 +41,7 @@ class Recorder:
request_body = request.environ["wsgi.input"].read(request_body_size) request_body = request.environ["wsgi.input"].read(request_body_size)
body_str, body_encoded = self._encode_body(body=request_body) body_str, body_encoded = self._encode_body(body=request_body)
except (AttributeError, KeyError): except (AttributeError, KeyError):
body_str = "" # type: ignore[] body_str = ""
body_encoded = False body_encoded = False
finally: finally:
if request_body is not None: if request_body is not None:

View File

@ -197,7 +197,7 @@ class DomainDispatcherApplication:
) )
request_body_size = int(environ["CONTENT_LENGTH"]) request_body_size = int(environ["CONTENT_LENGTH"])
if simple_form and request_body_size: if simple_form and request_body_size:
body = environ["wsgi.input"].read(request_body_size).decode("utf-8") # type: ignore body = environ["wsgi.input"].read(request_body_size).decode("utf-8")
except (KeyError, ValueError): except (KeyError, ValueError):
pass pass
finally: finally:

View File

@ -243,10 +243,7 @@ class Broker(BaseModel):
"id": f"c-{mock_random.get_random_hex(6)}", "id": f"c-{mock_random.get_random_hex(6)}",
"revision": 1, "revision": 1,
} }
self.configurations = { # type: ignore[no-redef] self.configurations = {"current": current_config, "history": []}
"current": current_config,
"history": [],
}
if self.engine_type.upper() == "RABBITMQ": if self.engine_type.upper() == "RABBITMQ":
console_url = f"https://0000.mq.{region}.amazonaws.com" console_url = f"https://0000.mq.{region}.amazonaws.com"
endpoints = ["amqps://mockmq:5671"] endpoints = ["amqps://mockmq:5671"]

View File

@ -253,7 +253,7 @@ class NeptuneBackend(BaseBackend):
if self._db_cluster_options is None: if self._db_cluster_options is None:
from moto.rds.utils import decode_orderable_db_instance from moto.rds.utils import decode_orderable_db_instance
decoded_options: List[Dict[str, Any]] = load_resource( # type: ignore[assignment] decoded_options: List[Dict[str, Any]] = load_resource(
__name__, "../rds/resources/cluster_options/neptune.json" __name__, "../rds/resources/cluster_options/neptune.json"
) )
self._db_cluster_options = [ self._db_cluster_options = [

View File

@ -460,7 +460,9 @@ class OrganizationsBackend(BaseBackend):
return ou.describe() return ou.describe()
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_organizational_units_for_parent(self, **kwargs: Any) -> List[Dict[str, Any]]: # type: ignore def list_organizational_units_for_parent(
self, **kwargs: Any
) -> List[Dict[str, Any]]:
parent_id = self.validate_parent_id(kwargs["parent_id"]) parent_id = self.validate_parent_id(kwargs["parent_id"])
return [ return [
{"Id": ou.id, "Arn": ou.arn, "Name": ou.name} {"Id": ou.id, "Arn": ou.arn, "Name": ou.name}
@ -534,12 +536,12 @@ class OrganizationsBackend(BaseBackend):
return dict(CreateAccountStatuses=accounts_resp, NextToken=next_token) return dict(CreateAccountStatuses=accounts_resp, NextToken=next_token)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_accounts(self) -> List[FakeAccount]: # type: ignore def list_accounts(self) -> List[FakeAccount]:
accounts = [account.describe() for account in self.accounts] accounts = [account.describe() for account in self.accounts]
return sorted(accounts, key=lambda x: x["JoinedTimestamp"]) # type: ignore return sorted(accounts, key=lambda x: x["JoinedTimestamp"]) # type: ignore
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_accounts_for_parent(self, **kwargs: Any) -> Any: # type: ignore def list_accounts_for_parent(self, **kwargs: Any) -> Any:
parent_id = self.validate_parent_id(kwargs["parent_id"]) parent_id = self.validate_parent_id(kwargs["parent_id"])
accounts = [ accounts = [
account.describe() account.describe()

View File

@ -1713,7 +1713,7 @@ class RDSBackend(BaseBackend):
preferred_backup_window = db_kwargs.get("preferred_backup_window") preferred_backup_window = db_kwargs.get("preferred_backup_window")
preferred_maintenance_window = db_kwargs.get("preferred_maintenance_window") preferred_maintenance_window = db_kwargs.get("preferred_maintenance_window")
msg = valid_preferred_maintenance_window( msg = valid_preferred_maintenance_window(
preferred_maintenance_window, preferred_backup_window # type: ignore preferred_maintenance_window, preferred_backup_window
) )
if msg: if msg:
raise RDSClientError("InvalidParameterValue", msg) raise RDSClientError("InvalidParameterValue", msg)
@ -2262,7 +2262,7 @@ class RDSBackend(BaseBackend):
if tags is None: if tags is None:
tags = source_snapshot.tags tags = source_snapshot.tags
else: else:
tags = self._merge_tags(source_snapshot.tags, tags) # type: ignore tags = self._merge_tags(source_snapshot.tags, tags)
return self.create_db_cluster_snapshot( return self.create_db_cluster_snapshot(
db_cluster_identifier=source_snapshot.cluster.db_cluster_identifier, # type: ignore db_cluster_identifier=source_snapshot.cluster.db_cluster_identifier, # type: ignore
db_snapshot_identifier=target_snapshot_identifier, db_snapshot_identifier=target_snapshot_identifier,

View File

@ -677,10 +677,10 @@ class Route53Backend(BaseBackend):
self, dnsnames: Optional[List[str]] self, dnsnames: Optional[List[str]]
) -> Tuple[Optional[str], List[FakeZone]]: ) -> Tuple[Optional[str], List[FakeZone]]:
if dnsnames: if dnsnames:
dnsname = dnsnames[0] # type: ignore dnsname = dnsnames[0]
if dnsname[-1] != ".": if dnsname[-1] != ".":
dnsname += "." dnsname += "."
zones = [zone for zone in self.list_hosted_zones() if zone.name == dnsname] # type: ignore zones = [zone for zone in self.list_hosted_zones() if zone.name == dnsname]
else: else:
dnsname = None dnsname = None
# sort by names, but with domain components reversed # sort by names, but with domain components reversed
@ -694,7 +694,7 @@ class Route53Backend(BaseBackend):
zones = self.list_hosted_zones() zones = self.list_hosted_zones()
zones = sorted(zones, key=sort_key) zones = sorted(zones, key=sort_key)
return dnsname, zones # type: ignore return dnsname, zones
def list_hosted_zones_by_vpc(self, vpc_id: str) -> List[Dict[str, Any]]: def list_hosted_zones_by_vpc(self, vpc_id: str) -> List[Dict[str, Any]]:
""" """
@ -881,7 +881,9 @@ class Route53Backend(BaseBackend):
return self.query_logging_configs[query_logging_config_id] return self.query_logging_configs[query_logging_config_id]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_query_logging_configs(self, hosted_zone_id: Optional[str] = None) -> List[QueryLoggingConfig]: # type: ignore def list_query_logging_configs(
self, hosted_zone_id: Optional[str] = None
) -> List[QueryLoggingConfig]:
"""Return a list of query logging configs.""" """Return a list of query logging configs."""
if hosted_zone_id: if hosted_zone_id:
# Does the hosted_zone_id exist? # Does the hosted_zone_id exist?

View File

@ -755,7 +755,9 @@ class Route53ResolverBackend(BaseBackend):
return self.resolver_rule_associations[resolver_rule_association_id] return self.resolver_rule_associations[resolver_rule_association_id]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_resolver_endpoint_ip_addresses(self, resolver_endpoint_id: str) -> List[Dict[str, Any]]: # type: ignore[misc] def list_resolver_endpoint_ip_addresses(
self, resolver_endpoint_id: str
) -> List[Dict[str, Any]]:
self._validate_resolver_endpoint_id(resolver_endpoint_id) self._validate_resolver_endpoint_id(resolver_endpoint_id)
endpoint = self.resolver_endpoints[resolver_endpoint_id] endpoint = self.resolver_endpoints[resolver_endpoint_id]
return endpoint.ip_descriptions() return endpoint.ip_descriptions()
@ -814,7 +816,7 @@ class Route53ResolverBackend(BaseBackend):
return True return True
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_resolver_endpoints(self, filters: Any) -> List[ResolverEndpoint]: # type: ignore[misc] def list_resolver_endpoints(self, filters: Any) -> List[ResolverEndpoint]:
if not filters: if not filters:
filters = [] filters = []
@ -828,7 +830,7 @@ class Route53ResolverBackend(BaseBackend):
return endpoints return endpoints
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_resolver_rules(self, filters: Any) -> List[ResolverRule]: # type: ignore[misc] def list_resolver_rules(self, filters: Any) -> List[ResolverRule]:
if not filters: if not filters:
filters = [] filters = []
@ -842,7 +844,9 @@ class Route53ResolverBackend(BaseBackend):
return rules return rules
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_resolver_rule_associations(self, filters: Any) -> List[ResolverRuleAssociation]: # type: ignore[misc] def list_resolver_rule_associations(
self, filters: Any
) -> List[ResolverRuleAssociation]:
if not filters: if not filters:
filters = [] filters = []
@ -870,7 +874,9 @@ class Route53ResolverBackend(BaseBackend):
) )
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_tags_for_resource(self, resource_arn: str) -> Optional[List[Dict[str, str]]]: # type: ignore[misc] def list_tags_for_resource(
self, resource_arn: str
) -> Optional[List[Dict[str, str]]]:
self._matched_arn(resource_arn) self._matched_arn(resource_arn)
return self.tagger.list_tags_for_resource(resource_arn).get("Tags") return self.tagger.list_tags_for_resource(resource_arn).get("Tags")
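
The `Optional[...]` in `list_tags_for_resource`'s return type comes straight from `dict.get`, which yields `None` for a missing key; a reduced sketch of that shape (the tag store below is an assumption for illustration, not moto's TaggingService):

from typing import Dict, List, Optional

TagList = List[Dict[str, str]]

def get_tags(tags_by_arn: Dict[str, TagList], arn: str) -> Optional[TagList]:
    # .get() may return None, so the signature has to admit it; declaring
    # a bare TagList here would put an ignore right back on this line
    return tags_by_arn.get(arn)

print(get_tags({"resource-1": [{"Key": "env", "Value": "test"}]}, "resource-2"))  # None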

View File

@ -1934,7 +1934,7 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
etag=etag, etag=etag,
is_versioned=bucket.is_versioned, is_versioned=bucket.is_versioned,
# AWS uses VersionId=null in both requests and responses # AWS uses VersionId=null in both requests and responses
version_id=str(random.uuid4()) if bucket.is_versioned else "null", # type: ignore version_id=str(random.uuid4()) if bucket.is_versioned else "null",
multipart=multipart, multipart=multipart,
encryption=encryption, encryption=encryption,
kms_key_id=kms_key_id, kms_key_id=kms_key_id,

View File

@ -253,7 +253,9 @@ class S3Response(BaseResponse):
return self.bucket_response(request, full_url, headers) return self.bucket_response(request, full_url, headers)
@amzn_request_id @amzn_request_id
def bucket_response(self, request: Any, full_url: str, headers: Any) -> TYPE_RESPONSE: # type: ignore def bucket_response(
self, request: Any, full_url: str, headers: Any
) -> TYPE_RESPONSE:
self.setup_class(request, full_url, headers, use_raw_body=True) self.setup_class(request, full_url, headers, use_raw_body=True)
bucket_name = self.parse_bucket_name_from_url(request, full_url) bucket_name = self.parse_bucket_name_from_url(request, full_url)
self.backend.log_incoming_request(request, bucket_name) self.backend.log_incoming_request(request, bucket_name)
@ -437,8 +439,8 @@ class S3Response(BaseResponse):
for cors_rule in bucket.cors: for cors_rule in bucket.cors:
if cors_rule.allowed_origins is not None: if cors_rule.allowed_origins is not None:
if cors_matches_origin(origin, cors_rule.allowed_origins): # type: ignore if cors_matches_origin(origin, cors_rule.allowed_origins):
response_headers["Access-Control-Allow-Origin"] = origin # type: ignore response_headers["Access-Control-Allow-Origin"] = origin
if cors_rule.allowed_methods is not None: if cors_rule.allowed_methods is not None:
response_headers[ response_headers[
"Access-Control-Allow-Methods" "Access-Control-Allow-Methods"
@ -831,7 +833,7 @@ class S3Response(BaseResponse):
body = self.body.decode("utf-8") body = self.body.decode("utf-8")
ver = re.search(r"<Status>([A-Za-z]+)</Status>", body) ver = re.search(r"<Status>([A-Za-z]+)</Status>", body)
if ver: if ver:
self.backend.put_bucket_versioning(bucket_name, ver.group(1)) # type: ignore self.backend.put_bucket_versioning(bucket_name, ver.group(1))
template = self.response_template(S3_BUCKET_VERSIONING) template = self.response_template(S3_BUCKET_VERSIONING)
return template.render(bucket_versioning_status=ver.group(1)) return template.render(bucket_versioning_status=ver.group(1))
else: else:
@ -1196,7 +1198,9 @@ class S3Response(BaseResponse):
# amz-checksum-sha256:<..>\r\n # amz-checksum-sha256:<..>\r\n
@amzn_request_id @amzn_request_id
def key_response(self, request: Any, full_url: str, headers: Dict[str, Any]) -> TYPE_RESPONSE: # type: ignore[misc] def key_response(
self, request: Any, full_url: str, headers: Dict[str, Any]
) -> TYPE_RESPONSE:
# Key and Control are lumped in because splitting out the regex is too much of a pain :/ # Key and Control are lumped in because splitting out the regex is too much of a pain :/
self.setup_class(request, full_url, headers, use_raw_body=True) self.setup_class(request, full_url, headers, use_raw_body=True)
bucket_name = self.parse_bucket_name_from_url(request, full_url) bucket_name = self.parse_bucket_name_from_url(request, full_url)
@ -2205,7 +2209,7 @@ class S3Response(BaseResponse):
return 200, response_headers, response return 200, response_headers, response
if query.get("uploadId"): if query.get("uploadId"):
multipart_id = query["uploadId"][0] # type: ignore multipart_id = query["uploadId"][0]
multipart, value, etag = self.backend.complete_multipart_upload( multipart, value, etag = self.backend.complete_multipart_upload(
bucket_name, multipart_id, self._complete_multipart_body(body) bucket_name, multipart_id, self._complete_multipart_body(body)

View File

@ -1420,7 +1420,7 @@ class SageMakerModelBackend(BaseBackend):
return resource.tags return resource.tags
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_tags(self, arn: str) -> List[Dict[str, str]]: # type: ignore[misc] def list_tags(self, arn: str) -> List[Dict[str, str]]:
resource = self._get_resource_from_arn(arn) resource = self._get_resource_from_arn(arn)
return resource.tags return resource.tags
@ -1429,7 +1429,7 @@ class SageMakerModelBackend(BaseBackend):
resource.tags = [tag for tag in resource.tags if tag["Key"] not in tag_keys] resource.tags = [tag for tag in resource.tags if tag["Key"] not in tag_keys]
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_experiments(self) -> List["FakeExperiment"]: # type: ignore[misc] def list_experiments(self) -> List["FakeExperiment"]:
return list(self.experiments.values()) return list(self.experiments.values())
def search(self, resource: Any = None, search_expression: Any = None) -> Any: def search(self, resource: Any = None, search_expression: Any = None) -> Any:
@ -1599,7 +1599,11 @@ class SageMakerModelBackend(BaseBackend):
) )
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_trials(self, experiment_name: Optional[str] = None, trial_component_name: Optional[str] = None) -> List["FakeTrial"]: # type: ignore[misc] def list_trials(
self,
experiment_name: Optional[str] = None,
trial_component_name: Optional[str] = None,
) -> List["FakeTrial"]:
trials_fetched = list(self.trials.values()) trials_fetched = list(self.trials.values())
def evaluate_filter_expression(trial_data: FakeTrial) -> bool: def evaluate_filter_expression(trial_data: FakeTrial) -> bool:
@ -1658,7 +1662,9 @@ class SageMakerModelBackend(BaseBackend):
self.trial_components[trial_component_name].update(details_json) self.trial_components[trial_component_name].update(details_json)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_trial_components(self, trial_name: Optional[str] = None) -> List["FakeTrialComponent"]: # type: ignore[misc] def list_trial_components(
self, trial_name: Optional[str] = None
) -> List["FakeTrialComponent"]:
trial_components_fetched = list(self.trial_components.values()) trial_components_fetched = list(self.trial_components.values())
return [ return [
@ -1696,7 +1702,7 @@ class SageMakerModelBackend(BaseBackend):
if trial_name in self.trials.keys(): if trial_name in self.trials.keys():
self.trials[trial_name].trial_components = list( self.trials[trial_name].trial_components = list(
filter( filter(
lambda x: x != trial_component_name, # type: ignore lambda x: x != trial_component_name,
self.trials[trial_name].trial_components, self.trials[trial_name].trial_components,
) )
) )

View File

@ -230,7 +230,7 @@ class SESBackend(BaseBackend):
raise MessageRejectedError("Too many destinations.") raise MessageRejectedError("Too many destinations.")
total_recipient_count = sum( total_recipient_count = sum(
map(lambda d: sum(map(len, d["Destination"].values())), destinations) # type: ignore map(lambda d: sum(map(len, d["Destination"].values())), destinations)
) )
if total_recipient_count > RECIPIENT_LIMIT: if total_recipient_count > RECIPIENT_LIMIT:
raise MessageRejectedError("Too many destinations.") raise MessageRejectedError("Too many destinations.")
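
The map/lambda pair above totals every address across each destination's address lists; an equivalent generator-expression form (shown purely as a reading aid, not the code SESBackend uses) makes the nesting explicit:

from typing import Any, Dict, List

def total_recipients(destinations: List[Dict[str, Any]]) -> int:
    # each entry is assumed to look like
    # {"Destination": {"ToAddresses": [...], "CcAddresses": [...], "BccAddresses": [...]}}
    return sum(
        sum(len(addresses) for addresses in dest["Destination"].values())
        for dest in destinations
    )

print(total_recipients([{"Destination": {"ToAddresses": ["a@example.com"], "CcAddresses": []}}]))  # 1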

View File

@ -183,7 +183,7 @@ class EmailResponse(BaseResponse):
def set_identity_notification_topic(self) -> str: def set_identity_notification_topic(self) -> str:
identity = self.querystring.get("Identity")[0] # type: ignore identity = self.querystring.get("Identity")[0] # type: ignore
not_type = self.querystring.get("NotificationType")[0] # type: ignore not_type = self.querystring.get("NotificationType")[0] # type: ignore
sns_topic = self.querystring.get("SnsTopic") # type: ignore sns_topic = self.querystring.get("SnsTopic")
if sns_topic: if sns_topic:
sns_topic = sns_topic[0] sns_topic = sns_topic[0]
@ -211,7 +211,7 @@ class EmailResponse(BaseResponse):
return template.render(name=configuration_set_name) return template.render(name=configuration_set_name)
def create_configuration_set_event_destination(self) -> str: def create_configuration_set_event_destination(self) -> str:
configuration_set_name = self._get_param("ConfigurationSetName") # type: ignore configuration_set_name = self._get_param("ConfigurationSetName")
is_configuration_event_enabled = self.querystring.get( is_configuration_event_enabled = self.querystring.get(
"EventDestination.Enabled" "EventDestination.Enabled"
)[ )[

View File

@ -113,7 +113,7 @@ class Topic(CloudFormationModel):
return json.dumps(self._policy_json, separators=(",", ":")) return json.dumps(self._policy_json, separators=(",", ":"))
@policy.setter @policy.setter
def policy(self, policy: Any) -> None: # type: ignore[misc] def policy(self, policy: Any) -> None:
self._policy_json = json.loads(policy) self._policy_json = json.loads(policy)
@staticmethod @staticmethod

View File

@ -537,7 +537,9 @@ class Queue(CloudFormationModel):
return result return result
def url(self, request_url: ParseResult) -> str: def url(self, request_url: ParseResult) -> str:
return f"{request_url.scheme}://{request_url.netloc}/{self.account_id}/{self.name}" # type: ignore return (
f"{request_url.scheme}://{request_url.netloc}/{self.account_id}/{self.name}"
)
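
With the trailing suppression gone, the over-long return is simply wrapped in parentheses so one logical line can span several physical ones; the same idiom applies to any long expression, e.g.:

from urllib.parse import urlparse, ParseResult

def queue_url(request_url: ParseResult, account_id: str, name: str) -> str:
    # sketch of the parenthesised-return style, not Queue.url itself
    return (
        f"{request_url.scheme}://{request_url.netloc}/{account_id}/{name}"
    )

print(queue_url(urlparse("https://sqs.us-east-1.amazonaws.com"), "123456789012", "my-queue"))
# -> https://sqs.us-east-1.amazonaws.com/123456789012/my-queue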
@property @property
def messages(self) -> List[Message]: def messages(self) -> List[Message]:

View File

@ -256,7 +256,7 @@ class SQSResponse(BaseResponse):
"AttributeName" "AttributeName"
) or self.querystring.get("AttributeName") ) or self.querystring.get("AttributeName")
attributes = self.sqs_backend.get_queue_attributes(queue_name, attribute_names) # type: ignore attributes = self.sqs_backend.get_queue_attributes(queue_name, attribute_names)
if self.is_json(): if self.is_json():
if len(attributes) == 0: if len(attributes) == 0:

View File

@ -250,7 +250,7 @@ class SSOAdminBackend(BaseBackend):
raise ResourceNotFound raise ResourceNotFound
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_permission_sets(self, instance_arn: str) -> List[PermissionSet]: # type: ignore[misc] def list_permission_sets(self, instance_arn: str) -> List[PermissionSet]:
permission_sets = [] permission_sets = []
for permission_set in self.permission_sets: for permission_set in self.permission_sets:
if permission_set.instance_arn == instance_arn: if permission_set.instance_arn == instance_arn:

View File

@ -502,7 +502,7 @@ class StepFunctionBackend(BaseBackend):
return state_machine return state_machine
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_state_machines(self) -> Iterable[StateMachine]: # type: ignore[misc] def list_state_machines(self) -> Iterable[StateMachine]:
return sorted(self.state_machines, key=lambda x: x.creation_date) return sorted(self.state_machines, key=lambda x: x.creation_date)
def describe_state_machine(self, arn: str) -> StateMachine: def describe_state_machine(self, arn: str) -> StateMachine:
@ -550,7 +550,9 @@ class StepFunctionBackend(BaseBackend):
return state_machine.stop_execution(execution_arn) return state_machine.stop_execution(execution_arn)
@paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc] @paginate(pagination_model=PAGINATION_MODEL) # type: ignore[misc]
def list_executions(self, state_machine_arn: str, status_filter: Optional[str] = None) -> Iterable[Execution]: # type: ignore[misc] def list_executions(
self, state_machine_arn: str, status_filter: Optional[str] = None
) -> Iterable[Execution]:
""" """
The status of every execution is set to 'RUNNING' by default. The status of every execution is set to 'RUNNING' by default.
Set the following environment variable if you want to get a FAILED status back: Set the following environment variable if you want to get a FAILED status back:

View File

@ -77,7 +77,7 @@ class Domain(BaseModel):
_all = [] _all = []
for family in self.types[kind].values(): for family in self.types[kind].values():
for _type in family.values(): for _type in family.values():
if _type.status == status: # type: ignore if _type.status == status:
_all.append(_type) _all.append(_type)
return _all return _all

View File

@ -222,7 +222,7 @@ class WorkflowExecution(BaseModel):
# now find the first timeout to process # now find the first timeout to process
first_timeout = None first_timeout = None
if timeout_candidates: if timeout_candidates:
first_timeout = min(timeout_candidates, key=lambda t: t.timestamp) # type: ignore first_timeout = min(timeout_candidates, key=lambda t: t.timestamp)
if first_timeout: if first_timeout:
should_schedule_decision_next = False should_schedule_decision_next = False
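
`min(..., key=...)` simply picks the candidate with the earliest timestamp; a reduced illustration of the same call shape (`Timeout` here is a made-up stand-in, not moto's SWF model):

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Timeout:
    kind: str
    timestamp: float

def first_timeout(candidates: List[Timeout]) -> Optional[Timeout]:
    # with a homogeneous, typed list the key lambda needs no suppression
    return min(candidates, key=lambda t: t.timestamp) if candidates else None

print(first_timeout([Timeout("decision", 20.0), Timeout("activity", 5.0)]))
# -> Timeout(kind='activity', timestamp=5.0)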

View File

@ -195,18 +195,18 @@ class FakeTranscriptionJob(BaseObject, ManagedState):
# If none is set, default to "en-US" # If none is set, default to "en-US"
self.language_codes: List[Dict[str, Any]] = [] # type: ignore[no-redef] self.language_codes: List[Dict[str, Any]] = [] # type: ignore[no-redef]
if self.language_options is None or len(self.language_options) == 0: if self.language_options is None or len(self.language_options) == 0:
self.language_codes.append( # type: ignore self.language_codes.append(
{"LanguageCode": "en-US", "DurationInSeconds": 123.0} {"LanguageCode": "en-US", "DurationInSeconds": 123.0}
) )
else: else:
self.language_codes.append( # type: ignore self.language_codes.append(
{ {
"LanguageCode": self.language_options[0], "LanguageCode": self.language_options[0],
"DurationInSeconds": 123.0, "DurationInSeconds": 123.0,
} }
) )
if len(self.language_options) > 1: if len(self.language_options) > 1:
self.language_codes.append( # type: ignore self.language_codes.append(
{ {
"LanguageCode": self.language_options[1], "LanguageCode": self.language_options[1],
"DurationInSeconds": 321.0, "DurationInSeconds": 321.0,