Techdebt: Improve linting (#7332)

Bert Blommers 2024-02-11 14:47:34 +00:00 committed by GitHub
parent a5a2c22fc8
commit dbd33f9cda
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 67 additions and 70 deletions

View File

@@ -2153,7 +2153,7 @@ class LambdaBackend(BaseBackend):
     def send_sqs_batch(self, function_arn: str, messages: Any, queue_arn: str) -> bool:
         success = True
         for message in messages:
-            result = self._send_sqs_message(function_arn, message, queue_arn)  # type: ignore[arg-type]
+            result = self._send_sqs_message(function_arn, message, queue_arn)
             if not result:
                 success = False
         return success
@@ -2335,13 +2335,13 @@ class LambdaBackend(BaseBackend):
         self, function_name: str, qualifier: str, raw: str
     ) -> Dict[str, Any]:
         fn = self.get_function(function_name, qualifier)
-        return fn.policy.add_statement(raw, qualifier)  # type: ignore[union-attr]
+        return fn.policy.add_statement(raw, qualifier)

     def remove_permission(
         self, function_name: str, sid: str, revision: str = ""
     ) -> None:
         fn = self.get_function(function_name)
-        fn.policy.del_statement(sid, revision)  # type: ignore[union-attr]
+        fn.policy.del_statement(sid, revision)

     def get_code_signing_config(self, function_name: str) -> Dict[str, Any]:
         fn = self.get_function(function_name)
@@ -2351,7 +2351,7 @@ class LambdaBackend(BaseBackend):
         fn = self._lambdas.get_function_by_name_or_arn_with_qualifier(
             function_name, qualifier
         )
-        return fn.policy.wire_format()  # type: ignore[union-attr]
+        return fn.policy.wire_format()

     def update_function_code(
         self, function_name: str, qualifier: str, body: Dict[str, Any]
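
Note on the dropped ignores above: mypy's [union-attr] error is raised when an attribute is accessed on a value that may be None, so these comments were presumably only needed while fn.policy was still seen as Optional at the call sites. A minimal illustrative sketch of the behaviour (class names are hypothetical, not moto's):

from typing import Optional


class Policy:
    def wire_format(self) -> str:
        return "{}"


class Function:
    def __init__(self, policy: Optional[Policy] = None) -> None:
        self.policy = policy


def get_policy_document(fn: Function) -> str:
    # Without the assert, mypy reports:
    #   Item "None" of "Optional[Policy]" has no attribute "wire_format"  [union-attr]
    assert fn.policy is not None
    return fn.policy.wire_format()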

View File

@@ -5,4 +5,4 @@ from .models import LambdaBackend, lambda_simple_backends
 class LambdaSimpleResponse(LambdaResponse):
     @property
     def backend(self) -> LambdaBackend:
-        return lambda_simple_backends[self.current_account][self.region]  # type: ignore[return-value]
+        return lambda_simple_backends[self.current_account][self.region]

View File

@@ -9,4 +9,4 @@ class BatchSimpleResponse(BatchResponse):
         :return: Batch Backend
         :rtype: moto.batch.models.BatchBackend
         """
-        return batch_simple_backends[self.current_account][self.region]  # type: ignore[return-value]
+        return batch_simple_backends[self.current_account][self.region]

View File

@@ -20,7 +20,7 @@ def first_day() -> str:
         .replace(minute=0)
         .replace(second=0)
     )
-    return iso_8601_datetime_without_milliseconds(as_date)  # type: ignore[return-value]
+    return iso_8601_datetime_without_milliseconds(as_date)


 class CostCategoryDefinition(BaseModel):

View File

@@ -439,7 +439,7 @@ class FakeStack(CloudFormationModel):

     @property
     def creation_time_iso_8601(self) -> str:
-        return iso_8601_datetime_without_milliseconds(self.creation_time)  # type: ignore[return-value]
+        return iso_8601_datetime_without_milliseconds(self.creation_time)

     def _add_stack_event(
         self,
@@ -656,7 +656,7 @@ class FakeChangeSet(BaseModel):

     @property
     def creation_time_iso_8601(self) -> str:
-        return iso_8601_datetime_without_milliseconds(self.creation_time)  # type: ignore[return-value]
+        return iso_8601_datetime_without_milliseconds(self.creation_time)

     def diff(self) -> List[FakeChange]:
         changes = []

View File

@@ -44,9 +44,8 @@ def filter_tasks(tasks: Iterable[Any], filters: List[Dict[str, Any]]) -> Any:
         if not filter_function:
             continue

-        # https://github.com/python/mypy/issues/12682
         matching_tasks = filter(
-            lambda task: filter_function(task, f["Values"]), matching_tasks  # type: ignore[arg-type]
+            lambda task: filter_function(task, f["Values"]), matching_tasks
         )

     return matching_tasks

View File

@@ -716,7 +716,7 @@ class Table(CloudFormationModel):

                 possible_results.sort(
                     key=lambda item: conv(item.attrs[index_range_key["AttributeName"]])  # type: ignore
-                    if item.attrs.get(index_range_key["AttributeName"])  # type: ignore
+                    if item.attrs.get(index_range_key["AttributeName"])
                     else None
                 )
             else:

View File

@@ -230,4 +230,4 @@ def validate_schema(
             if {"S": ""} in range_values:
                 raise KeyIsEmptyStringException(index_range_key)

-    return hash_value, range_comparison, range_values  # type: ignore[return-value]
+    return hash_value, range_comparison, range_values

View File

@@ -406,8 +406,8 @@ class Instance(TaggedEC2Resource, BotoInstance, CloudFormationModel):
                     spec.instance_type == self.instance_type
                     and spec.subnet_id == self.subnet_id
                 ):
+                    fleet.fulfilled_capacity -= spec.weighted_capacity
                     break
-            fleet.fulfilled_capacity -= spec.weighted_capacity
             fleet.spot_requests = [
                 req for req in fleet.spot_requests if req.instance != self
             ]

View File

@@ -1013,7 +1013,7 @@ class SecurityGroupBackend:
         if ip_ranges:
             for cidr in ip_ranges:
                 if (
-                    isinstance(cidr, dict)  # type: ignore
+                    isinstance(cidr, dict)
                     and not any(
                         [
                             is_valid_cidr(cidr.get("CidrIp", "")),
@@ -1068,7 +1068,7 @@ class SecurityGroupBackend:
         if ip_ranges:
             for cidr in ip_ranges:
                 if (
-                    isinstance(cidr, dict)  # type: ignore
+                    isinstance(cidr, dict)
                     and not any(
                         [
                             is_valid_cidr(cidr.get("CidrIp", "")),

View File

@@ -371,7 +371,7 @@ class SpotFleetRequest(TaggedEC2Resource, CloudFormationModel):
                 if new_fulfilled_capacity - spec.weighted_capacity < self.target_capacity:
                     continue
-                new_fulfilled_capacity -= spec.weighted_capacity
+                new_fulfilled_capacity -= spec.weighted_capacity  # pylint: disable=W0631
                 instance_ids.append(instance.id)

             self.spot_requests = [
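
The # pylint: disable=W0631 marker added here (and in the IAM hunk further down) refers to pylint's undefined-loop-variable check, which fires when a loop variable is read after the loop has finished. A simplified sketch of the warning and of the safer shape used in the Instance hunk above, with hypothetical names rather than moto code:

def matched_weight(specs: list) -> float:
    # W0631: `spec` is unbound if `specs` is empty, and otherwise holds
    # whatever value the loop left behind when it stopped.
    for spec in specs:
        if spec["match"]:
            break
    return spec["weight"]  # pylint: disable=W0631


def matched_weight_safe(specs: list) -> float:
    # Reading the variable inside the loop body avoids the warning,
    # mirroring how fulfilled_capacity is now adjusted before `break`.
    for spec in specs:
        if spec["match"]:
            return spec["weight"]
    return 0.0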

View File

@@ -679,14 +679,14 @@ class ECRBackend(BaseBackend):
                     found = True
                     response["images"].append(image.response_batch_get_image)

             if not found:
                 response["failures"].append(
                     {
                         "imageId": {"imageTag": image_id.get("imageTag", "null")},
                         "failureCode": "ImageNotFound",
                         "failureReason": "Requested image not found",
                     }
                 )

         return response

View File

@@ -171,7 +171,7 @@ class Rule(CloudFormationModel):

         event_copy = copy.deepcopy(event)
         event_copy["time"] = iso_8601_datetime_without_milliseconds(
-            utcfromtimestamp(event_copy["time"])
+            utcfromtimestamp(event_copy["time"])  # type: ignore[arg-type]
         )

         log_stream_name = str(random.uuid4())
@@ -191,7 +191,7 @@ class Rule(CloudFormationModel):
             archive_name
         )
         if archive.uuid == archive_uuid:  # type: ignore[union-attr]
-            archive.events.append(event)  # type: ignore[arg-type,union-attr]
+            archive.events.append(event)  # type: ignore[union-attr]

     def _find_api_destination(self, resource_id: str) -> "Destination":
         backend: "EventsBackend" = events_backends[self.account_id][self.region_name]
@@ -205,7 +205,7 @@ class Rule(CloudFormationModel):

         event_copy = copy.deepcopy(event)
         event_copy["time"] = iso_8601_datetime_without_milliseconds(
-            utcfromtimestamp(event_copy["time"])
+            utcfromtimestamp(event_copy["time"])  # type: ignore[arg-type]
         )

         if group_id:

View File

@@ -66,7 +66,7 @@ def _invoke_lambda(account_id: str, fn_arn: str, event: Any) -> None:
     body = json.dumps(event)
     get_backend(account_id, lambda_region).invoke(
         function_name=fn_arn,
-        qualifier=None,  # type: ignore[arg-type]
+        qualifier=None,
         body=body,
         headers=dict(),
         response_headers=dict(),

View File

@@ -1,7 +1,7 @@
 from typing import TYPE_CHECKING, List, TypedDict

 if TYPE_CHECKING:
-    from typing_extentions import Any, Dict, Required, Union
+    from typing_extensions import Any, Dict, Required, Union


 # NOTE: Typing is based on the following document https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-event-patterns.html

View File

@@ -68,7 +68,7 @@ class ForecastResponse(BaseResponse):
                 }
                 for dsg in self.forecast_backend.list_dataset_groups()
             ],
-            key=lambda x: x["LastModificationTime"],  # type: ignore
+            key=lambda x: x["LastModificationTime"],
             reverse=True,
         )
         response = {"DatasetGroups": list_all}

View File

@@ -449,7 +449,7 @@ class ManagedPolicy(Policy, CloudFormationModel):
         return policy

     def __eq__(self, other: Any) -> bool:
-        return self.arn == other.arn  # type: ignore[no-any-return]
+        return self.arn == other.arn

     def __hash__(self) -> int:
         return self.arn.__hash__()
@@ -982,7 +982,7 @@ class InstanceProfile(CloudFormationModel):

     def to_embedded_config_dict(self) -> Dict[str, Any]:
         # Instance Profiles aren't a config item itself, but they are returned in IAM roles with
-        # a "config like" json structure It's also different than Role.to_config_dict()
+        # a "config like" json structure. It's also different than Role.to_config_dict()
         roles = []
         for role in self.roles:
             roles.append(
@@ -1012,7 +1012,7 @@ class InstanceProfile(CloudFormationModel):
                     "path": self.path,
                     "instanceProfileName": self.name,
                     "instanceProfileId": self.id,
-                    "arn": f"arn:aws:iam::{self.account_id}:instance-profile/{role.name}",
+                    "arn": f"arn:aws:iam::{self.account_id}:instance-profile/{role.name}",  # pylint: disable=W0631
                     "createDate": str(self.create_date),
                     "roles": roles,
                 }

View File

@@ -24,6 +24,6 @@ url_paths = {
     "{0}/organizationconfiguration/describe$": Inspector2Response.dispatch,
     "{0}/organizationconfiguration/update$": Inspector2Response.dispatch,
     "{0}/tags/(?P<resource_arn>.+)$": Inspector2Response.method_dispatch(
-        Inspector2Response.tags  # type: ignore
+        Inspector2Response.tags
     ),
 }

View File

@@ -161,7 +161,7 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
             host=host,
             path=path,
             headers=req.headers,
-            body=req_body,  # type: ignore[arg-type]
+            body=req_body,
             form_data=form_data,
         )
         debug("\t=====RESPONSE========")

View File

@@ -28,7 +28,7 @@ def send(
 ) -> None:
     responseUrl = event["ResponseURL"]

-    print(responseUrl)
+    print(responseUrl)  # noqa: T201

     responseBody = {
         "Status": responseStatus,
@@ -46,8 +46,8 @@ def send(
     json_responseBody = json.dumps(responseBody)

-    print("Response body:")
-    print(json_responseBody)
+    print("Response body:")  # noqa: T201
+    print(json_responseBody)  # noqa: T201

     headers = {"content-type": "", "content-length": str(len(json_responseBody))}
@@ -55,8 +55,8 @@ def send(
         response = http.request(  # type: ignore
             "PUT", responseUrl, headers=headers, body=json_responseBody
         )
-        print("Status code:", response.status)
+        print("Status code:", response.status)  # noqa: T201
     except Exception as e:
-        print("send(..) failed executing http.request(..):", e)
+        print("send(..) failed executing http.request(..):", e)  # noqa: T201

View File

@@ -46,7 +46,7 @@ class BaseObject(BaseModel):
             response_object[key[0].upper() + key[1:]] = value
         return response_object

-    def response_object(self) -> Dict[str, Any]:  # type: ignore[misc]
+    def response_object(self) -> Dict[str, Any]:
         return self.gen_response_object()

View File

@@ -88,8 +88,8 @@ def main(argv: Any = None) -> None:
     sa = httpd.socket.getsockname()

-    print("Call `moto_proxy -h` for example invocations")
-    print(f"Serving HTTP Proxy on {sa[0]}:{sa[1]} ...")  # noqa
+    print("Call `moto_proxy -h` for example invocations")  # noqa: T201
+    print(f"Serving HTTP Proxy on {sa[0]}:{sa[1]} ...")  # noqa: T201
     httpd.serve_forever()

View File

@@ -147,7 +147,9 @@ class Cluster:
                     valid_engines=ClusterEngine.list_cluster_engines(),
                 )
             )
-        self.engine_version = kwargs.get("engine_version") or Cluster.default_engine_version(self.engine)  # type: ignore
+        self.engine_version = kwargs.get(
+            "engine_version"
+        ) or Cluster.default_engine_version(self.engine)
         self.engine_mode = kwargs.get("engine_mode") or "provisioned"
         self.iops = kwargs.get("iops")
         self.kms_key_id = kwargs.get("kms_key_id")
@@ -165,7 +167,7 @@ class Cluster:
         self.allocated_storage = kwargs.get("allocated_storage")
         if self.allocated_storage is None:
             self.allocated_storage = Cluster.default_allocated_storage(
-                engine=self.engine, storage_type=self.storage_type  # type: ignore
+                engine=self.engine, storage_type=self.storage_type
             )
         self.master_username = kwargs.get("master_username")
         self.global_cluster_identifier = kwargs.get("global_cluster_identifier")
@@ -609,7 +611,7 @@ class Database(CloudFormationModel):
         self.allocated_storage = kwargs.get("allocated_storage")
         if self.allocated_storage is None:
             self.allocated_storage = Database.default_allocated_storage(
-                engine=self.engine, storage_type=self.storage_type  # type: ignore
+                engine=self.engine, storage_type=self.storage_type
             )
         self.db_cluster_identifier: Optional[str] = kwargs.get("db_cluster_identifier")
         self.db_instance_identifier = kwargs.get("db_instance_identifier")
@@ -617,7 +619,7 @@ class Database(CloudFormationModel):
         self.db_instance_class = kwargs.get("db_instance_class")
         self.port = kwargs.get("port")
         if self.port is None:
-            self.port = Database.default_port(self.engine)  # type: ignore
+            self.port = Database.default_port(self.engine)
         self.db_instance_identifier = kwargs.get("db_instance_identifier")
         self.db_name = kwargs.get("db_name")
         self.instance_create_time = iso_8601_datetime_with_milliseconds()

View File

@@ -277,7 +277,7 @@ class Cluster(TaggableResourceMixin, CloudFormationModel):
             "MasterUserPassword": "****",
             "ClusterVersion": self.cluster_version,
             "VpcSecurityGroups": [
-                {"Status": "active", "VpcSecurityGroupId": group.id}  # type: ignore
+                {"Status": "active", "VpcSecurityGroupId": group.id}
                 for group in self.vpc_security_groups
             ],
             "ClusterSubnetGroupName": self.cluster_subnet_group_name,

View File

@@ -94,13 +94,13 @@ class HealthCheck(CloudFormationModel):

     def set_children(self, children: Any) -> None:
         if children and isinstance(children, list):
-            self.children = children  # type: ignore
+            self.children = children
         elif children and isinstance(children, str):
             self.children = [children]  # type: ignore

     def set_regions(self, regions: Any) -> None:
         if regions and isinstance(regions, list):
-            self.regions = regions  # type: ignore
+            self.regions = regions
         elif regions and isinstance(regions, str):
             self.regions = [regions]  # type: ignore
@@ -723,7 +723,7 @@ class Route53Backend:
         the_zone.delete_rrset(record_set)
         the_zone.rr_changes.append(original_change)

-    @paginate(pagination_model=PAGINATION_MODEL)  # type: ignore[misc]
+    @paginate(pagination_model=PAGINATION_MODEL)
     def list_hosted_zones(self) -> List[FakeZone]:
         """
         The parameters DelegationSetId and HostedZoneType are not yet implemented

View File

@@ -249,8 +249,8 @@ class Route53(BaseResponse):
         elif method == "GET":
             querystring = parse_qs(self.parsed_url.query)
             template = Template(LIST_RRSET_RESPONSE)
-            start_type = querystring.get("type", [None])[0]  # type: ignore
-            start_name = querystring.get("name", [None])[0]  # type: ignore
+            start_type = querystring.get("type", [None])[0]
+            start_name = querystring.get("name", [None])[0]
             max_items = int(querystring.get("maxitems", ["300"])[0])
             if start_type and not start_name:

View File

@@ -343,7 +343,7 @@ class S3Response(BaseResponse):
                 f"Method {method} has not been implemented in the S3 backend yet"
             )

-    def _get_querystring(self, request: Any, full_url: str) -> Dict[str, Any]:  # type: ignore[misc]
+    def _get_querystring(self, request: Any, full_url: str) -> Dict[str, Any]:
         # Flask's Request has the querystring already parsed
         # In ServerMode, we can use this, instead of manually parsing this
         if hasattr(request, "args"):
@@ -1119,7 +1119,7 @@ class S3Response(BaseResponse):
             new_key = self.backend.put_object(bucket_name, key, f)

             if self.querystring.get("acl"):
-                acl = get_canned_acl(self.querystring["acl"][0])  # type: ignore
+                acl = get_canned_acl(self.querystring["acl"][0])
                 new_key.set_acl(acl)

             # Metadata
@@ -1490,7 +1490,7 @@ class S3Response(BaseResponse):
                 unquote(copy_source_parsed.path).lstrip("/").split("/", 1)
             )
             src_version_id = parse_qs(copy_source_parsed.query).get(
-                "versionId", [None]  # type: ignore
+                "versionId", [None]
             )[0]
             src_range = request.headers.get("x-amz-copy-source-range", "").split(
                 "bytes="
@@ -1642,7 +1642,7 @@ class S3Response(BaseResponse):
                 unquote(copy_source_parsed.path).lstrip("/").split("/", 1)
             )
             src_version_id = parse_qs(copy_source_parsed.query).get(
-                "versionId", [None]  # type: ignore
+                "versionId", [None]
             )[0]
             key_to_copy = self.backend.get_object(

View File

@@ -1,3 +1,8 @@
 [build-system]
 requires = ["setuptools >= 40.6.0"]
 build-backend = "setuptools.build_meta"
+
+[tool.ruff.lint]
+ignore = ["E501"]
+extend-select = ["I", "T201", "T203"]
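
For context on the new selections: "I" is ruff's import-sorting rule set, while T201 and T203 are the flake8-print rules that flag print() and pprint() calls; that is why the intentional console output elsewhere in this commit gains # noqa: T201 markers. A small illustrative sketch (not moto code) of how the rules and suppressions interact:

from pprint import pprint


def dump_state(state: dict) -> None:
    # T203 flags pprint calls; an explicit suppression documents that the
    # output is intentional rather than leftover debugging.
    pprint(state)  # noqa: T203


def announce(port: int) -> None:
    # T201 flags plain print calls; deliberate CLI output keeps a marker,
    # as in the moto_proxy and `def send(...)` hunks above.
    print(f"Serving on port {port} ...")  # noqa: T201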

View File

@@ -1,2 +0,0 @@
-ignore = ["E501"]
-extend-select = ["I"]

View File

@@ -268,17 +268,12 @@ markers =
 [coverage:run]
 relative_files = True

-[flake8]
-ignore = W503,W605,E128,E501,E203,E266,E501,E231,FS003,E704
-exclude = moto/packages,dist,tests/terraformtests
-
 [pylint.MASTER]
 ignore-paths=moto/packages

 [pylint.'MESSAGES CONTROL']
 disable = W,C,R,E
-# future sensible checks = super-init-not-called, unspecified-encoding, undefined-loop-variable
-enable = anomalous-backslash-in-string, arguments-renamed, dangerous-default-value, deprecated-module, function-redefined, import-self, redefined-builtin, redefined-outer-name, reimported, pointless-statement, super-with-arguments, unused-argument, unused-import, unused-variable, useless-else-on-loop, wildcard-import
+enable = arguments-renamed, deprecated-module, function-redefined, redefined-outer-name, signature-differs

 [mypy]
 files= moto, tests/test_core, tests/test_batch_simple

View File

@@ -32,7 +32,7 @@ def dynamodb_aws_verified(create_table: bool = True):

         if allow_aws_request:
             if create_table:
-                print(f"Test {func} will create DynamoDB Table {table_name}")
+                print(f"Test {func} will create DDB Table {table_name}")  # noqa
                 return create_table_and_test(table_name)
             else:
                 return func()

View File

@@ -157,8 +157,6 @@ def test_create_export_task_raises_ClientError_when_bucket_not_found(
             destination=destination,
         )
     err = exc.value.response["Error"]
-    if err["Code"] != "InvalidParameterException":
-        print(err)
     assert err["Code"] == "InvalidParameterException"
     assert (
         err["Message"]

View File

@@ -31,7 +31,7 @@ def s3_aws_verified(func):
         )

         if allow_aws_request:
-            print(f"Test {func} will create {bucket_name}")
+            print(f"Test {func} will create {bucket_name}")  # noqa: T201
             resp = create_bucket_and_test(bucket_name)
         else:
             with mock_aws():