Techdebt: Replace string-format with f-strings (for s* dirs) (#5692)
parent 2093a99485
commit ba4104c38e
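Every hunk in this commit applies the same mechanical rewrite: a str.format() call (or % interpolation) is replaced by an f-string that inlines the same expressions, with no behavioural change. A minimal standalone sketch of the pattern, using an illustrative value rather than anything taken from the diff:

    # before/after of the rewrite applied throughout the s* modules
    method = "PATCH"  # illustrative value, not from the diff
    before = "Found unsupported HTTP method in CORS config. Unsupported method is {}".format(method)
    after = f"Found unsupported HTTP method in CORS config. Unsupported method is {method}"
    assert before == after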
@@ -177,9 +177,7 @@ class InvalidRequest(S3ClientError):
     def __init__(self, method, *args, **kwargs):
         super().__init__(
             "InvalidRequest",
-            "Found unsupported HTTP method in CORS config. Unsupported method is {}".format(
-                method
-            ),
+            f"Found unsupported HTTP method in CORS config. Unsupported method is {method}",
             *args,
             **kwargs,
         )
@@ -241,9 +239,7 @@ class InvalidMaxPartArgument(S3ClientError):
     code = 400

     def __init__(self, arg, min_val, max_val):
-        error = "Argument {} must be an integer between {} and {}".format(
-            arg, min_val, max_val
-        )
+        error = f"Argument {arg} must be an integer between {min_val} and {max_val}"
         super().__init__("InvalidArgument", error)

@@ -170,9 +170,7 @@ class FakeKey(BaseModel, ManagedState):
     @property
     def arn(self):
         # S3 Objects don't have an ARN, but we do need something unique when creating tags against this resource
-        return "arn:aws:s3:::{}/{}/{}".format(
-            self.bucket_name, self.name, self.version_id
-        )
+        return f"arn:aws:s3:::{self.bucket_name}/{self.name}/{self.version_id}"

     @value.setter
     def value(self, new_value):
@@ -217,7 +215,7 @@ class FakeKey(BaseModel, ManagedState):
                 value_md5.update(block)

             self._etag = value_md5.hexdigest()
-        return '"{0}"'.format(self._etag)
+        return f'"{self._etag}"'

     @property
     def last_modified_ISO8601(self):
@@ -254,9 +252,7 @@ class FakeKey(BaseModel, ManagedState):
             if self.status == "IN_PROGRESS":
                 header = 'ongoing-request="true"'
             else:
-                header = 'ongoing-request="false", expiry-date="{0}"'.format(
-                    self.expiry_date
-                )
+                header = f'ongoing-request="false", expiry-date="{self.expiry_date}"'
             res["x-amz-restore"] = header

         if self._is_versioned:
@@ -413,7 +409,7 @@ class FakeMultipart(BaseModel):

         etag = md5_hash()
         etag.update(bytes(md5s))
-        return total, "{0}-{1}".format(etag.hexdigest(), count)
+        return total, f"{etag.hexdigest()}-{count}"

     def set_part(self, part_id, value):
         if part_id < 1:
@@ -460,9 +456,7 @@ class FakeGrantee(BaseModel):
         return "Group" if self.uri else "CanonicalUser"

     def __repr__(self):
-        return "FakeGrantee(display_name: '{}', id: '{}', uri: '{}')".format(
-            self.display_name, self.id, self.uri
-        )
+        return f"FakeGrantee(display_name: '{self.display_name}', id: '{self.id}', uri: '{self.uri}')"


 ALL_USERS_GRANTEE = FakeGrantee(uri="http://acs.amazonaws.com/groups/global/AllUsers")
@@ -492,9 +486,7 @@ class FakeGrant(BaseModel):
         self.permissions = permissions

     def __repr__(self):
-        return "FakeGrant(grantees: {}, permissions: {})".format(
-            self.grantees, self.permissions
-        )
+        return f"FakeGrant(grantees: {self.grantees}, permissions: {self.permissions})"


 class FakeAcl(BaseModel):
@@ -513,7 +505,7 @@ class FakeAcl(BaseModel):
         return False

     def __repr__(self):
-        return "FakeAcl(grants: {})".format(self.grants)
+        return f"FakeAcl(grants: {self.grants})"

     def to_config_dict(self):
         """Returns the object into the format expected by AWS Config"""
@@ -584,7 +576,7 @@ def get_canned_acl(acl):
             FakeGrant([LOG_DELIVERY_GRANTEE], [PERMISSION_READ_ACP, PERMISSION_WRITE])
         )
     else:
-        assert False, "Unknown canned acl: %s" % (acl,)
+        assert False, f"Unknown canned acl: {acl}"
     return FakeAcl(grants=grants)


@@ -1238,25 +1230,23 @@ class FakeBucket(CloudFormationModel):

     @property
     def arn(self):
-        return "arn:aws:s3:::{}".format(self.name)
+        return f"arn:aws:s3:::{self.name}"

     @property
     def domain_name(self):
-        return "{}.s3.amazonaws.com".format(self.name)
+        return f"{self.name}.s3.amazonaws.com"

     @property
     def dual_stack_domain_name(self):
-        return "{}.s3.dualstack.{}.amazonaws.com".format(self.name, self.region_name)
+        return f"{self.name}.s3.dualstack.{self.region_name}.amazonaws.com"

     @property
     def regional_domain_name(self):
-        return "{}.s3.{}.amazonaws.com".format(self.name, self.region_name)
+        return f"{self.name}.s3.{self.region_name}.amazonaws.com"

     @property
     def website_url(self):
-        return "http://{}.s3-website.{}.amazonaws.com".format(
-            self.name, self.region_name
-        )
+        return f"http://{self.name}.s3-website.{self.region_name}.amazonaws.com"

     @property
     def physical_resource_id(self):
@@ -2066,7 +2056,7 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
                         # If delimiter, we need to split out folder_results
                         key_without_delimiter = key_without_prefix.split(delimiter)[0]
                         folder_results.add(
-                            "{0}{1}{2}".format(prefix, key_without_delimiter, delimiter)
+                            f"{prefix}{key_without_delimiter}{delimiter}"
                         )
                     else:
                         key_results.add(key)
@@ -293,9 +293,7 @@ class S3Response(BaseResponse):
             return self._response_options(bucket_name)
         else:
             raise NotImplementedError(
-                "Method {0} has not been implemented in the S3 backend yet".format(
-                    method
-                )
+                f"Method {method} has not been implemented in the S3 backend yet"
             )

     @staticmethod
@@ -404,9 +402,7 @@ class S3Response(BaseResponse):
             for unsup in ("delimiter", "max-uploads"):
                 if unsup in querystring:
                     raise NotImplementedError(
-                        "Listing multipart uploads with {} has not been implemented yet.".format(
-                            unsup
-                        )
+                        f"Listing multipart uploads with {unsup} has not been implemented yet."
                     )
             multiparts = list(self.backend.get_all_multiparts(bucket_name).values())
             if "prefix" in querystring:
@@ -1068,9 +1064,7 @@ class S3Response(BaseResponse):
             raise InvalidRange(
                 actual_size=str(length), range_requested=request.headers.get("range")
             )
-        response_headers["content-range"] = "bytes {0}-{1}/{2}".format(
-            begin, end, length
-        )
+        response_headers["content-range"] = f"bytes {begin}-{end}/{length}"
         content = response_content[begin : end + 1]
         response_headers["content-length"] = len(content)
         return 206, response_headers, content
@@ -1211,9 +1205,7 @@ class S3Response(BaseResponse):
             return self._response_options(bucket_name)
         else:
             raise NotImplementedError(
-                "Method {0} has not been implemented in the S3 backend yet".format(
-                    method
-                )
+                f"Method {method} has not been implemented in the S3 backend yet"
             )

     def _key_response_get(self, bucket_name, query, key_name, headers):
@@ -1287,7 +1279,7 @@ class S3Response(BaseResponse):
             if_unmodified_since = str_to_rfc_1123_datetime(if_unmodified_since)
             if key.last_modified.replace(microsecond=0) > if_unmodified_since:
                 raise PreconditionFailed("If-Unmodified-Since")
-        if if_match and key.etag not in [if_match, '"{0}"'.format(if_match)]:
+        if if_match and key.etag not in [if_match, f'"{if_match}"']:
             raise PreconditionFailed("If-Match")

         if if_modified_since:
@@ -1882,17 +1874,17 @@ class S3Response(BaseResponse):
             # 1st verify that the proper notification configuration has been passed in (with an ARN that is close
             # to being correct -- nothing too complex in the ARN logic):
             the_notification = parsed_xml["NotificationConfiguration"].get(
-                "{}Configuration".format(name)
+                f"{name}Configuration"
             )
             if the_notification:
                 found_notifications += 1
                 if not isinstance(the_notification, list):
                     the_notification = parsed_xml["NotificationConfiguration"][
-                        "{}Configuration".format(name)
+                        f"{name}Configuration"
                     ] = [the_notification]

                 for n in the_notification:
-                    if not n[name].startswith("arn:aws:{}:".format(arn_string)):
+                    if not n[name].startswith(f"arn:aws:{arn_string}:"):
                         raise InvalidNotificationARN()

                     # 2nd, verify that the Events list is correct:
@@ -1956,7 +1948,7 @@ class S3Response(BaseResponse):
         response_headers = {}
         if response_meta is not None:
             for k in response_meta:
-                response_headers["x-amz-{}".format(k)] = response_meta[k]
+                response_headers[f"x-amz-{k}"] = response_meta[k]
         return 204, response_headers, ""

     def _complete_multipart_body(self, body):
@@ -406,9 +406,7 @@ class FakeEndpointConfig(BaseObject, CloudFormationModel):
             elif "ServerlessConfig" in production_variant.keys():
                 self.validate_serverless_config(production_variant["ServerlessConfig"])
             else:
-                message = "Invalid Keys for ProductionVariant: received {} but expected it to contain one of {}".format(
-                    production_variant.keys(), ["InstanceType", "ServerlessConfig"]
-                )
+                message = f"Invalid Keys for ProductionVariant: received {production_variant.keys()} but expected it to contain one of {['InstanceType', 'ServerlessConfig']}"
                 raise ValidationError(message=message)

     def validate_serverless_config(self, serverless_config):
@@ -416,9 +414,7 @@ class FakeEndpointConfig(BaseObject, CloudFormationModel):
         if not validators.is_one_of(
             serverless_config["MemorySizeInMB"], VALID_SERVERLESS_MEMORY_SIZE
         ):
-            message = "Value '{}' at 'MemorySizeInMB' failed to satisfy constraint: Member must satisfy enum value set: {}".format(
-                serverless_config["MemorySizeInMB"], VALID_SERVERLESS_MEMORY_SIZE
-            )
+            message = f"Value '{serverless_config['MemorySizeInMB']}' at 'MemorySizeInMB' failed to satisfy constraint: Member must satisfy enum value set: {VALID_SERVERLESS_MEMORY_SIZE}"
             raise ValidationError(message=message)

     def validate_instance_type(self, instance_type):
@@ -491,9 +487,7 @@ class FakeEndpointConfig(BaseObject, CloudFormationModel):
             "ml.m4.4xlarge",
         ]
         if not validators.is_one_of(instance_type, VALID_INSTANCE_TYPES):
-            message = "Value '{}' at 'instanceType' failed to satisfy constraint: Member must satisfy enum value set: {}".format(
-                instance_type, VALID_INSTANCE_TYPES
-            )
+            message = f"Value '{instance_type}' at 'instanceType' failed to satisfy constraint: Member must satisfy enum value set: {VALID_INSTANCE_TYPES}"
             raise ValidationError(message=message)

     @property
@@ -824,15 +818,13 @@ class FakeSagemakerNotebookInstance(CloudFormationModel):
             "ml.m4.4xlarge",
         ]
         if not validators.is_one_of(instance_type, VALID_INSTANCE_TYPES):
-            message = "Value '{}' at 'instanceType' failed to satisfy constraint: Member must satisfy enum value set: {}".format(
-                instance_type, VALID_INSTANCE_TYPES
-            )
+            message = f"Value '{instance_type}' at 'instanceType' failed to satisfy constraint: Member must satisfy enum value set: {VALID_INSTANCE_TYPES}"
             raise ValidationError(message=message)

     @property
     def url(self):
-        return "{}.notebook.{}.sagemaker.aws".format(
-            self.notebook_instance_name, self.region_name
+        return (
+            f"{self.notebook_instance_name}.notebook.{self.region_name}.sagemaker.aws"
         )

     def start(self):
@@ -1325,10 +1317,10 @@ class SageMakerModelBackend(BaseBackend):
         try:
             del self.experiments[experiment_name]
         except KeyError:
-            message = "Could not find experiment configuration '{}'.".format(
-                FakeTrial.arn_formatter(experiment_name, self.region_name)
+            arn = FakeTrial.arn_formatter(experiment_name, self.region_name)
+            raise ValidationError(
+                message=f"Could not find experiment configuration '{arn}'."
             )
-            raise ValidationError(message=message)

     def create_trial(self, trial_name, experiment_name):
         trial = FakeTrial(
@@ -1346,19 +1338,17 @@ class SageMakerModelBackend(BaseBackend):
         try:
             return self.trials[trial_name].response_object
         except KeyError:
-            message = "Could not find trial '{}'.".format(
-                FakeTrial.arn_formatter(trial_name, self.region_name)
-            )
-            raise ValidationError(message=message)
+            arn = FakeTrial.arn_formatter(trial_name, self.region_name)
+            raise ValidationError(message=f"Could not find trial '{arn}'.")

     def delete_trial(self, trial_name):
         try:
             del self.trials[trial_name]
         except KeyError:
-            message = "Could not find trial configuration '{}'.".format(
-                FakeTrial.arn_formatter(trial_name, self.region_name)
+            arn = FakeTrial.arn_formatter(trial_name, self.region_name)
+            raise ValidationError(
+                message=f"Could not find trial configuration '{arn}'."
             )
-            raise ValidationError(message=message)

     @paginate(pagination_model=PAGINATION_MODEL)
     def list_trials(self, experiment_name=None, trial_component_name=None):
@@ -1396,21 +1386,19 @@ class SageMakerModelBackend(BaseBackend):
         try:
             del self.trial_components[trial_component_name]
         except KeyError:
-            message = "Could not find trial-component configuration '{}'.".format(
-                FakeTrial.arn_formatter(trial_component_name, self.region_name)
+            arn = FakeTrial.arn_formatter(trial_component_name, self.region_name)
+            raise ValidationError(
+                message=f"Could not find trial-component configuration '{arn}'."
             )
-            raise ValidationError(message=message)

     def describe_trial_component(self, trial_component_name):
         try:
             return self.trial_components[trial_component_name].response_object
         except KeyError:
-            message = "Could not find trial component '{}'.".format(
-                FakeTrialComponent.arn_formatter(
-                    trial_component_name, self.account_id, self.region_name
-                )
-            )
-            raise ValidationError(message=message)
+            arn = FakeTrialComponent.arn_formatter(
+                trial_component_name, self.account_id, self.region_name
+            )
+            raise ValidationError(message=f"Could not find trial component '{arn}'.")

     def _update_trial_component_details(self, trial_component_name, details_json):
         self.trial_components[trial_component_name].update(details_json)
@@ -1511,9 +1499,7 @@ class SageMakerModelBackend(BaseBackend):
     def _validate_unique_notebook_instance_name(self, notebook_instance_name):
         if notebook_instance_name in self.notebook_instances:
             duplicate_arn = self.notebook_instances[notebook_instance_name].arn
-            message = "Cannot create a duplicate Notebook Instance ({})".format(
-                duplicate_arn
-            )
+            message = f"Cannot create a duplicate Notebook Instance ({duplicate_arn})"
             raise ValidationError(message=message)

     def get_notebook_instance(self, notebook_instance_name):
@@ -1533,9 +1519,7 @@ class SageMakerModelBackend(BaseBackend):
     def delete_notebook_instance(self, notebook_instance_name):
         notebook_instance = self.get_notebook_instance(notebook_instance_name)
         if not notebook_instance.is_deletable:
-            message = "Status ({}) not in ([Stopped, Failed]). Unable to transition to (Deleting) for Notebook Instance ({})".format(
-                notebook_instance.status, notebook_instance.arn
-            )
+            message = f"Status ({notebook_instance.status}) not in ([Stopped, Failed]). Unable to transition to (Deleting) for Notebook Instance ({notebook_instance.arn})"
             raise ValidationError(message=message)
         del self.notebook_instances[notebook_instance_name]

@@ -1546,13 +1530,12 @@ class SageMakerModelBackend(BaseBackend):
             notebook_instance_lifecycle_config_name
             in self.notebook_instance_lifecycle_configurations
         ):
-            message = "Unable to create Notebook Instance Lifecycle Config {}. (Details: Notebook Instance Lifecycle Config already exists.)".format(
-                FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
-                    notebook_instance_lifecycle_config_name,
-                    self.account_id,
-                    self.region_name,
-                )
-            )
+            arn = FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
+                notebook_instance_lifecycle_config_name,
+                self.account_id,
+                self.region_name,
+            )
+            message = f"Unable to create Notebook Instance Lifecycle Config {arn}. (Details: Notebook Instance Lifecycle Config already exists.)"
             raise ValidationError(message=message)
         lifecycle_config = FakeSageMakerNotebookInstanceLifecycleConfig(
             account_id=self.account_id,
@@ -1574,13 +1557,12 @@ class SageMakerModelBackend(BaseBackend):
                 notebook_instance_lifecycle_config_name
             ].response_object
         except KeyError:
-            message = "Unable to describe Notebook Instance Lifecycle Config '{}'. (Details: Notebook Instance Lifecycle Config does not exist.)".format(
-                FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
-                    notebook_instance_lifecycle_config_name,
-                    self.account_id,
-                    self.region_name,
-                )
-            )
+            arn = FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
+                notebook_instance_lifecycle_config_name,
+                self.account_id,
+                self.region_name,
+            )
+            message = f"Unable to describe Notebook Instance Lifecycle Config '{arn}'. (Details: Notebook Instance Lifecycle Config does not exist.)"
             raise ValidationError(message=message)

     def delete_notebook_instance_lifecycle_config(
@@ -1591,13 +1573,12 @@ class SageMakerModelBackend(BaseBackend):
                 notebook_instance_lifecycle_config_name
             ]
         except KeyError:
-            message = "Unable to delete Notebook Instance Lifecycle Config '{}'. (Details: Notebook Instance Lifecycle Config does not exist.)".format(
-                FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
-                    notebook_instance_lifecycle_config_name,
-                    self.account_id,
-                    self.region_name,
-                )
-            )
+            arn = FakeSageMakerNotebookInstanceLifecycleConfig.arn_formatter(
+                notebook_instance_lifecycle_config_name,
+                self.account_id,
+                self.region_name,
+            )
+            message = f"Unable to delete Notebook Instance Lifecycle Config '{arn}'. (Details: Notebook Instance Lifecycle Config does not exist.)"
             raise ValidationError(message=message)

     def create_endpoint_config(
@@ -1867,11 +1848,10 @@ class SageMakerModelBackend(BaseBackend):
         try:
             return self.training_jobs[training_job_name].response_object
         except KeyError:
-            message = "Could not find training job '{}'.".format(
-                FakeTrainingJob.arn_formatter(
-                    training_job_name, self.account_id, self.region_name
-                )
-            )
+            arn = FakeTrainingJob.arn_formatter(
+                training_job_name, self.account_id, self.region_name
+            )
+            message = f"Could not find training job '{arn}'."
             raise ValidationError(message=message)

     def list_training_jobs(
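Besides the literal format-to-f-string swaps, the SageMaker hunks above also flatten the nested error-message construction: the ARN is computed into a local variable first and the f-string is passed straight to ValidationError. A sketch of the resulting shape, taken from the diff itself (the surrounding method body is not shown here):

    try:
        del self.trials[trial_name]
    except KeyError:
        arn = FakeTrial.arn_formatter(trial_name, self.region_name)
        raise ValidationError(
            message=f"Could not find trial configuration '{arn}'."
        )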
@@ -486,9 +486,7 @@ class SageMakerResponse(BaseResponse):
         errors = []
         if max_results and max_results not in max_results_range:
             errors.append(
-                "Value '{0}' at 'maxResults' failed to satisfy constraint: Member must have value less than or equal to {1}".format(
-                    max_results, max_results_range[-1]
-                )
+                f"Value '{max_results}' at 'maxResults' failed to satisfy constraint: Member must have value less than or equal to {max_results_range[-1]}"
             )

         if sort_by not in allowed_sort_by:
@@ -541,9 +539,7 @@ class SageMakerResponse(BaseResponse):
         errors = []
         if max_results and max_results not in max_results_range:
             errors.append(
-                "Value '{0}' at 'maxResults' failed to satisfy constraint: Member must have value less than or equal to {1}".format(
-                    max_results, max_results_range[-1]
-                )
+                f"Value '{max_results}' at 'maxResults' failed to satisfy constraint: Member must have value less than or equal to {max_results_range[-1]}"
             )

         if sort_by not in allowed_sort_by:
@@ -26,7 +26,7 @@ class SecretHasNoValueException(SecretsManagerClientError):
         super().__init__(
             "ResourceNotFoundException",
             message="Secrets Manager can't find the specified secret "
-            "value for staging label: {}".format(version_stage),
+            f"value for staging label: {version_stage}",
         )

@@ -265,9 +265,7 @@ class SecretsManagerBackend(BaseBackend):
         if not secret_version:
             raise ResourceNotFoundException(
                 "An error occurred (ResourceNotFoundException) when calling the GetSecretValue operation: Secrets "
-                "Manager can't find the specified secret value for VersionId: {}".format(
-                    version_id
-                )
+                f"Manager can't find the specified secret value for VersionId: {version_id}"
             )

         response_data = {
@@ -553,9 +551,7 @@ class SecretsManagerBackend(BaseBackend):
         try:
             func = lambda_backend.get_function(secret.rotation_lambda_arn)
         except Exception:
-            msg = "Resource not found for ARN '{}'.".format(
-                secret.rotation_lambda_arn
-            )
+            msg = f"Resource not found for ARN '{secret.rotation_lambda_arn}'."
             raise ResourceNotFoundException(msg)

         for step in ["create", "set", "test", "finish"]:
@@ -594,11 +590,9 @@ class SecretsManagerBackend(BaseBackend):
         # password size must have value less than or equal to 4096
         if password_length > 4096:
             raise ClientError(
-                "ClientError: An error occurred (ValidationException) \
-when calling the GetRandomPassword operation: 1 validation error detected: Value '{}' at 'passwordLength' \
-failed to satisfy constraint: Member must have value less than or equal to 4096".format(
-                    password_length
-                )
+                f"ClientError: An error occurred (ValidationException) \
+when calling the GetRandomPassword operation: 1 validation error detected: Value '{password_length}' at 'passwordLength' \
+failed to satisfy constraint: Member must have value less than or equal to 4096"
             )
         if password_length < 4:
             raise InvalidParameterException(
@@ -772,14 +766,13 @@ class SecretsManagerBackend(BaseBackend):
         if remove_from_version_id:
             if remove_from_version_id not in secret.versions:
                 raise InvalidParameterException(
-                    "Not a valid version: %s" % remove_from_version_id
+                    f"Not a valid version: {remove_from_version_id}"
                 )

             stages = secret.versions[remove_from_version_id]["version_stages"]
             if version_stage not in stages:
                 raise InvalidParameterException(
-                    "Version stage %s not found in version %s"
-                    % (version_stage, remove_from_version_id)
+                    f"Version stage {version_stage} not found in version {remove_from_version_id}"
                 )

             stages.remove(version_stage)
@@ -787,7 +780,7 @@ class SecretsManagerBackend(BaseBackend):
         if move_to_version_id:
             if move_to_version_id not in secret.versions:
                 raise InvalidParameterException(
-                    "Not a valid version: %s" % move_to_version_id
+                    f"Not a valid version: {move_to_version_id}"
                 )

             stages = secret.versions[move_to_version_id]["version_stages"]
@@ -18,14 +18,12 @@ def _validate_filters(filters):
             raise InvalidParameterException("Invalid filter key")
         if filter_key not in filter_keys():
             raise ValidationException(
-                "1 validation error detected: Value '{}' at 'filters.{}.member.key' failed to satisfy constraint: "
-                "Member must satisfy enum value set: [all, name, tag-key, description, tag-value]".format(
-                    filter_key, idx + 1
-                )
+                f"1 validation error detected: Value '{filter_key}' at 'filters.{(idx + 1)}.member.key' failed to satisfy constraint: "
+                "Member must satisfy enum value set: [all, name, tag-key, description, tag-value]"
             )
         if filter_values is None:
             raise InvalidParameterException(
-                "Invalid filter values for key: {}".format(filter_key)
+                f"Invalid filter values for key: {filter_key}"
             )

@@ -85,7 +85,7 @@ def _exclude_characters(password, exclude_characters):
     for c in exclude_characters:
         if c in string.punctuation:
             # Escape punctuation regex usage
-            c = r"\{0}".format(c)
+            c = rf"\{c}"
         password = re.sub(c, "", str(password))
     return password

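The r"\{0}".format(c) to rf"\{c}" change above relies on the raw and f-string prefixes combining: the backslash stays literal while the character is interpolated. A quick standalone check of the equivalence (the value of c is illustrative):

    c = "."  # any punctuation character to escape
    assert r"\{0}".format(c) == rf"\{c}" == "\\."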
@@ -98,5 +98,5 @@ class MissingRenderingAttributeException(RESTError):
     def __init__(self, var):
         super().__init__(
             "MissingRenderingAttributeException",
-            "Attribute '{0}' is not present in the rendering data.".format(var),
+            f"Attribute '{var}' is not present in the rendering data.",
         )
@@ -189,7 +189,7 @@ class SESBackend(BaseBackend):
             raise MessageRejectedError("Too many recipients.")
         if not self._is_verified_address(source):
             self.rejected_messages_count += 1
-            raise MessageRejectedError("Email address not verified %s" % source)
+            raise MessageRejectedError(f"Email address not verified {source}")
         destination_addresses = [
             address for addresses in destinations.values() for address in addresses
         ]
@@ -221,10 +221,10 @@ class SESBackend(BaseBackend):

         if not self._is_verified_address(source):
             self.rejected_messages_count += 1
-            raise MessageRejectedError("Email address not verified %s" % source)
+            raise MessageRejectedError(f"Email address not verified {source}")

         if not self.templates.get(template[0]):
-            raise TemplateDoesNotExist("Template (%s) does not exist" % template[0])
+            raise TemplateDoesNotExist(f"Template ({template[0]}) does not exist")

         self.__process_sns_feedback__(source, destinations, region)

@@ -246,7 +246,7 @@ class SESBackend(BaseBackend):
             raise MessageRejectedError("Too many recipients.")
         if not self._is_verified_address(source):
             self.rejected_messages_count += 1
-            raise MessageRejectedError("Email address not verified %s" % source)
+            raise MessageRejectedError(f"Email address not verified {source}")
         destination_addresses = [
             address for addresses in destinations.values() for address in addresses
         ]
@@ -256,7 +256,7 @@ class SESBackend(BaseBackend):
             raise InvalidParameterValue(msg)

         if not self.templates.get(template[0]):
-            raise TemplateDoesNotExist("Template (%s) does not exist" % template[0])
+            raise TemplateDoesNotExist(f"Template ({template[0]}) does not exist")

         self.__process_sns_feedback__(source, destinations, region)

@@ -314,8 +314,7 @@ class SESBackend(BaseBackend):
         _, source_email_address = parseaddr(source)
         if not self._is_verified_address(source_email_address):
             raise MessageRejectedError(
-                "Did not have authority to send from email %s"
-                % source_email_address
+                f"Did not have authority to send from email {source_email_address}"
             )

         recipient_count = len(destinations)
@@ -327,8 +326,7 @@ class SESBackend(BaseBackend):
         _, source_email_address = parseaddr(message["from"])
         if not self._is_verified_address(source_email_address):
             raise MessageRejectedError(
-                "Did not have authority to send from email %s"
-                % source_email_address
+                f"Did not have authority to send from email {source_email_address}"
             )

         for header in "TO", "CC", "BCC":
@@ -480,9 +478,9 @@ class SESBackend(BaseBackend):
         html_part = template["html_part"]

         for key, value in template_data.items():
-            subject_part = str.replace(str(subject_part), "{{%s}}" % key, value)
-            text_part = str.replace(str(text_part), "{{%s}}" % key, value)
-            html_part = str.replace(str(html_part), "{{%s}}" % key, value)
+            subject_part = str.replace(str(subject_part), "{{" + key + "}}", value)
+            text_part = str.replace(str(text_part), "{{" + key + "}}", value)
+            html_part = str.replace(str(html_part), "{{" + key + "}}", value)

         email_obj = MIMEMultipart("alternative")

@@ -498,10 +496,8 @@ class SESBackend(BaseBackend):

         now = datetime.datetime.now().isoformat()

-        rendered_template = "Date: %s\r\nSubject: %s\r\n%s" % (
-            now,
-            subject_part,
-            email_obj.as_string(),
+        rendered_template = (
+            f"Date: {now}\r\nSubject: {subject_part}\r\n{email_obj.as_string()}"
         )
         return rendered_template

@@ -556,9 +552,7 @@ class SESBackend(BaseBackend):
         self, identity, mail_from_domain=None, behavior_on_mx_failure=None
     ):
         if identity not in (self.domains + self.addresses):
-            raise InvalidParameterValue(
-                "Identity '{0}' does not exist.".format(identity)
-            )
+            raise InvalidParameterValue(f"Identity '{identity}' does not exist.")

         if mail_from_domain is None:
             self.identity_mail_from_domains.pop(identity)
@@ -566,16 +560,16 @@ class SESBackend(BaseBackend):

         if not mail_from_domain.endswith(identity):
             raise InvalidParameterValue(
-                "Provided MAIL-FROM domain '{0}' is not subdomain of "
-                "the domain of the identity '{1}'.".format(mail_from_domain, identity)
+                f"Provided MAIL-FROM domain '{mail_from_domain}' is not subdomain of "
+                f"the domain of the identity '{identity}'."
             )

         if behavior_on_mx_failure not in (None, "RejectMessage", "UseDefaultValue"):
             raise ValidationError(
                 "1 validation error detected: "
-                "Value '{0}' at 'behaviorOnMXFailure'"
+                f"Value '{behavior_on_mx_failure}' at 'behaviorOnMXFailure'"
                 "failed to satisfy constraint: Member must satisfy enum value set: "
-                "[RejectMessage, UseDefaultValue]".format(behavior_on_mx_failure)
+                "[RejectMessage, UseDefaultValue]"
             )

         self.identity_mail_from_domains[identity] = {
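One hunk above deliberately avoids an f-string: the "{{%s}}" % key template placeholders become "{{" + key + "}}" concatenation, because literal braces inside an f-string would each have to be doubled. A sketch showing the spellings agree (the key is illustrative):

    key = "name"  # illustrative template variable
    assert "{{%s}}" % key == "{{" + key + "}}" == f"{{{{{key}}}}}" == "{{name}}"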
@@ -64,7 +64,7 @@ class EmailResponse(BaseResponse):
         for dest_type in destinations:
             # consume up to 51 to allow exception
             for i in range(1, 52):
-                field = "Destination.%s.member.%s" % (dest_type, i)
+                field = f"Destination.{dest_type}.member.{i}"
                 address = self.querystring.get(field)
                 if address is None:
                     break
@@ -85,7 +85,7 @@ class EmailResponse(BaseResponse):
         for dest_type in destinations:
             # consume up to 51 to allow exception
             for i in range(1, 52):
-                field = "Destination.%s.member.%s" % (dest_type, i)
+                field = f"Destination.{dest_type}.member.{i}"
                 address = self.querystring.get(field)
                 if address is None:
                     break
@@ -105,7 +105,7 @@ class EmailResponse(BaseResponse):
         destinations = []
         for i in range(1, 52):
             destination_field = (
-                "Destinations.member.%s.Destination.ToAddresses.member.1" % (i)
+                f"Destinations.member.{i}.Destination.ToAddresses.member.1"
             )
             if self.querystring.get(destination_field) is None:
                 break
@@ -113,10 +113,8 @@ class EmailResponse(BaseResponse):
             for dest_type in destination:
                 # consume up to 51 to allow exception
                 for j in range(1, 52):
-                    field = "Destinations.member.%s.Destination.%s.member.%s" % (
-                        i,
-                        dest_type,
-                        j,
+                    field = (
+                        f"Destinations.member.{i}.Destination.{dest_type}.member.{j}"
                     )
                     address = self.querystring.get(field)
                     if address is None:
@@ -142,7 +140,7 @@ class EmailResponse(BaseResponse):
         destinations = []
         # consume up to 51 to allow exception
         for i in range(1, 52):
-            field = "Destinations.member.%s" % i
+            field = f"Destinations.member.{i}"
             address = self.querystring.get(field)
             if address is None:
                 break
@@ -8,15 +8,7 @@ def random_hex(length):


 def get_random_message_id():
-    return "{0}-{1}-{2}-{3}-{4}-{5}-{6}".format(
-        random_hex(16),
-        random_hex(8),
-        random_hex(4),
-        random_hex(4),
-        random_hex(4),
-        random_hex(12),
-        random_hex(6),
-    )
+    return f"{random_hex(16)}-{random_hex(8)}-{random_hex(4)}-{random_hex(4)}-{random_hex(4)}-{random_hex(12)}-{random_hex(6)}"


 def is_valid_address(addr):
@@ -445,7 +445,7 @@ class PlatformEndpoint(BaseModel):

     def publish(self, message):
         if not self.enabled:
-            raise SnsEndpointDisabled("Endpoint %s disabled" % self.id)
+            raise SnsEndpointDisabled(f"Endpoint {self.id} disabled")

         # This is where we would actually send a message
         message_id = str(mock_random.uuid4())
@@ -561,13 +561,13 @@ class SNSBackend(BaseBackend):
             self.delete_topic_subscriptions(topic)
             self.topics.pop(arn)
         except KeyError:
-            raise SNSNotFoundError("Topic with arn {0} not found".format(arn))
+            raise SNSNotFoundError(f"Topic with arn {arn} not found")

     def get_topic(self, arn):
         try:
             return self.topics[arn]
         except KeyError:
-            raise SNSNotFoundError("Topic with arn {0} not found".format(arn))
+            raise SNSNotFoundError(f"Topic with arn {arn} not found")

     def set_topic_attribute(self, topic_arn, attribute_name, attribute_value):
         topic = self.get_topic(topic_arn)
@@ -578,12 +578,12 @@ class SNSBackend(BaseBackend):
         if re.search(r"[./-]{2,}", endpoint) or re.search(
             r"(^[./-]|[./-]$)", endpoint
         ):
-            raise SNSInvalidParameter("Invalid SMS endpoint: {}".format(endpoint))
+            raise SNSInvalidParameter(f"Invalid SMS endpoint: {endpoint}")

         reduced_endpoint = re.sub(r"[./-]", "", endpoint)

         if not is_e164(reduced_endpoint):
-            raise SNSInvalidParameter("Invalid SMS endpoint: {}".format(endpoint))
+            raise SNSInvalidParameter(f"Invalid SMS endpoint: {endpoint}")

         # AWS doesn't create duplicates
         old_subscription = self._find_subscription(topic_arn, endpoint, protocol)
@@ -671,9 +671,9 @@ class SNSBackend(BaseBackend):
         else:
             if not fifo_topic:
                 msg = (
-                    "Value {} for parameter MessageGroupId is invalid. "
+                    f"Value {group_id} for parameter MessageGroupId is invalid. "
                     "Reason: The request include parameter that is not valid for this queue type."
-                ).format(group_id)
+                )
                 raise InvalidParameterValue(msg)
         message_id = topic.publish(
             message,
@@ -697,7 +697,7 @@ class SNSBackend(BaseBackend):
         try:
             return self.applications[arn]
         except KeyError:
-            raise SNSNotFoundError("Application with arn {0} not found".format(arn))
+            raise SNSNotFoundError(f"Application with arn {arn} not found")

     def set_application_attributes(self, arn, attributes):
         application = self.get_application(arn)
@@ -724,7 +724,7 @@ class SNSBackend(BaseBackend):
                 ):
                     return endpoint
                 raise DuplicateSnsEndpointError(
-                    "Duplicate endpoint token with different attributes: %s" % token
+                    f"Duplicate endpoint token with different attributes: {token}"
                 )
         platform_endpoint = PlatformEndpoint(
             self.account_id,
@@ -761,7 +761,7 @@ class SNSBackend(BaseBackend):
         try:
             del self.platform_endpoints[arn]
         except KeyError:
-            raise SNSNotFoundError("Endpoint with arn {0} not found".format(arn))
+            raise SNSNotFoundError(f"Endpoint with arn {arn} not found")

     def get_subscription_attributes(self, arn):
         subscription = self.subscriptions.get(arn)
@@ -786,7 +786,7 @@ class SNSBackend(BaseBackend):
         # TODO: should do validation
         _subscription = [_ for _ in self.subscriptions.values() if _.arn == arn]
         if not _subscription:
-            raise SNSNotFoundError("Subscription with arn {0} not found".format(arn))
+            raise SNSNotFoundError(f"Subscription with arn {arn} not found")
         subscription = _subscription[0]

         subscription.attributes[name] = value
@@ -837,9 +837,7 @@ class SNSBackend(BaseBackend):
                         continue
                     else:
                         raise SNSInvalidParameter(
-                            "Invalid parameter: FilterPolicy: Unrecognized match type {type}".format(
-                                type=keyword
-                            )
+                            f"Invalid parameter: FilterPolicy: Unrecognized match type {keyword}"
                         )

                 raise SNSInvalidParameter(
@@ -867,9 +865,9 @@ class SNSBackend(BaseBackend):
             raise SNSInvalidParameter("Policy statement action out of service scope!")

         principals = [
-            "arn:aws:iam::{}:root".format(account_id) for account_id in aws_account_ids
+            f"arn:aws:iam::{account_id}:root" for account_id in aws_account_ids
         ]
-        actions = ["SNS:{}".format(action_name) for action_name in action_names]
+        actions = [f"SNS:{action_name}" for action_name in action_names]

         statement = {
             "Sid": label,
|
|||||||
data_type = value["DataType"]
|
data_type = value["DataType"]
|
||||||
if not data_type:
|
if not data_type:
|
||||||
raise InvalidParameterValue(
|
raise InvalidParameterValue(
|
||||||
"The message attribute '{0}' must contain non-empty "
|
f"The message attribute '{name}' must contain non-empty message attribute value."
|
||||||
"message attribute value.".format(name)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
data_type_parts = data_type.split(".")
|
data_type_parts = data_type.split(".")
|
||||||
@ -60,9 +59,9 @@ class SNSResponse(BaseResponse):
|
|||||||
"Number",
|
"Number",
|
||||||
]:
|
]:
|
||||||
raise InvalidParameterValue(
|
raise InvalidParameterValue(
|
||||||
"The message attribute '{0}' has an invalid message "
|
f"The message attribute '{name}' has an invalid message "
|
||||||
"attribute type, the set of supported type prefixes is "
|
"attribute type, the set of supported type prefixes is "
|
||||||
"Binary, Number, and String.".format(name)
|
"Binary, Number, and String."
|
||||||
)
|
)
|
||||||
|
|
||||||
transform_value = None
|
transform_value = None
|
||||||
@ -77,9 +76,7 @@ class SNSResponse(BaseResponse):
|
|||||||
raise InvalidParameterValue(
|
raise InvalidParameterValue(
|
||||||
"An error occurred (ParameterValueInvalid) "
|
"An error occurred (ParameterValueInvalid) "
|
||||||
"when calling the Publish operation: "
|
"when calling the Publish operation: "
|
||||||
"Could not cast message attribute '{0}' value to number.".format(
|
f"Could not cast message attribute '{name}' value to number."
|
||||||
name
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
transform_value = value["StringValue"]
|
transform_value = value["StringValue"]
|
||||||
@ -87,9 +84,9 @@ class SNSResponse(BaseResponse):
|
|||||||
transform_value = value["BinaryValue"]
|
transform_value = value["BinaryValue"]
|
||||||
if transform_value == "":
|
if transform_value == "":
|
||||||
raise InvalidParameterValue(
|
raise InvalidParameterValue(
|
||||||
"The message attribute '{0}' must contain non-empty "
|
f"The message attribute '{name}' must contain non-empty "
|
||||||
"message attribute value for message attribute "
|
"message attribute value for message attribute "
|
||||||
"type '{1}'.".format(name, data_type[0])
|
f"type '{data_type[0]}'."
|
||||||
)
|
)
|
||||||
|
|
||||||
# transformation
|
# transformation
|
||||||
@ -767,9 +764,7 @@ class SNSResponse(BaseResponse):
|
|||||||
# return error_response, dict(status=400)
|
# return error_response, dict(status=400)
|
||||||
|
|
||||||
template = self.response_template(CONFIRM_SUBSCRIPTION_TEMPLATE)
|
template = self.response_template(CONFIRM_SUBSCRIPTION_TEMPLATE)
|
||||||
return template.render(
|
return template.render(sub_arn=f"{arn}:68762e72-e9b1-410a-8b3b-903da69ee1d5")
|
||||||
sub_arn="{0}:68762e72-e9b1-410a-8b3b-903da69ee1d5".format(arn)
|
|
||||||
)
|
|
||||||
|
|
||||||
def list_tags_for_resource(self):
|
def list_tags_for_resource(self):
|
||||||
arn = self._get_param("ResourceArn")
|
arn = self._get_param("ResourceArn")
|
||||||
|
@@ -5,12 +5,12 @@ E164_REGEX = re.compile(r"^\+?[1-9]\d{1,14}$")


 def make_arn_for_topic(account_id, name, region_name):
-    return "arn:aws:sns:{0}:{1}:{2}".format(region_name, account_id, name)
+    return f"arn:aws:sns:{region_name}:{account_id}:{name}"


 def make_arn_for_subscription(topic_arn):
     subscription_id = mock_random.uuid4()
-    return "{0}:{1}".format(topic_arn, subscription_id)
+    return f"{topic_arn}:{subscription_id}"


 def is_e164(number):
@@ -63,7 +63,7 @@ class BatchRequestTooLong(RESTError):
         super().__init__(
             "BatchRequestTooLong",
             "Batch requests cannot be longer than 262144 bytes. "
-            "You have sent {} bytes.".format(length),
+            f"You have sent {length} bytes.",
         )


@@ -71,7 +71,7 @@ class BatchEntryIdsNotDistinct(RESTError):
     code = 400

     def __init__(self, entry_id):
-        super().__init__("BatchEntryIdsNotDistinct", "Id {} repeated.".format(entry_id))
+        super().__init__("BatchEntryIdsNotDistinct", f"Id {entry_id} repeated.")


 class TooManyEntriesInBatchRequest(RESTError):
@@ -80,8 +80,7 @@ class TooManyEntriesInBatchRequest(RESTError):
     def __init__(self, number):
         super().__init__(
             "TooManyEntriesInBatchRequest",
-            "Maximum number of entries per request are 10. "
-            "You have sent {}.".format(number),
+            "Maximum number of entries per request are 10. " f"You have sent {number}.",
         )


@@ -89,9 +88,7 @@ class InvalidAttributeName(RESTError):
     code = 400

     def __init__(self, attribute_name):
-        super().__init__(
-            "InvalidAttributeName", "Unknown Attribute {}.".format(attribute_name)
-        )
+        super().__init__("InvalidAttributeName", f"Unknown Attribute {attribute_name}.")


 class InvalidAttributeValue(RESTError):
@@ -100,7 +97,7 @@ class InvalidAttributeValue(RESTError):
     def __init__(self, attribute_name):
         super().__init__(
             "InvalidAttributeValue",
-            "Invalid value for the parameter {}.".format(attribute_name),
+            f"Invalid value for the parameter {attribute_name}.",
         )


@@ -116,8 +113,7 @@ class MissingParameter(RESTError):

     def __init__(self, parameter):
         super().__init__(
-            "MissingParameter",
-            "The request must contain the parameter {}.".format(parameter),
+            "MissingParameter", f"The request must contain the parameter {parameter}."
         )


@@ -126,5 +122,5 @@ class OverLimit(RESTError):

     def __init__(self, count):
         super().__init__(
-            "OverLimit", "{} Actions were found, maximum allowed is 7.".format(count)
+            "OverLimit", f"{count} Actions were found, maximum allowed is 7."
         )
@ -135,9 +135,9 @@ class Message(BaseModel):
def validate_attribute_name(name):
if not ATTRIBUTE_NAME_PATTERN.match(name):
raise MessageAttributesInvalid(
"The message attribute name '{0}' is invalid. "
f"The message attribute name '{name}' is invalid. "
"Attribute name can contain A-Z, a-z, 0-9, "
"underscore (_), hyphen (-), and period (.) characters.".format(name)
"underscore (_), hyphen (-), and period (.) characters."
)

@staticmethod
@ -400,9 +400,7 @@ class Queue(CloudFormationModel):
else:
raise RESTError(
"AWS.SimpleQueueService.NonExistentQueue",
"Could not find DLQ for {0}".format(
f"Could not find DLQ for {self.redrive_policy['deadLetterTargetArn']}",
self.redrive_policy["deadLetterTargetArn"]
),
)

@staticmethod
@ -511,8 +509,8 @@ class Queue(CloudFormationModel):
return result

def url(self, request_url):
return "{0}://{1}/{2}/{3}".format(
return (
request_url.scheme, request_url.netloc, self.account_id, self.name
f"{request_url.scheme}://{request_url.netloc}/{self.account_id}/{self.name}"
)

@property
@ -625,7 +623,7 @@ class Queue(CloudFormationModel):
else:
self._policy_json = {
"Version": "2012-10-17",
"Id": "{}/SQSDefaultPolicy".format(self.queue_arn),
"Id": f"{self.queue_arn}/SQSDefaultPolicy",
"Statement": [],
}

@ -692,7 +690,7 @@ class SQSBackend(BaseBackend):
def list_queues(self, queue_name_prefix):
re_str = ".*"
if queue_name_prefix:
re_str = "^{0}.*".format(queue_name_prefix)
re_str = f"^{queue_name_prefix}.*"
prefix_re = re.compile(re_str)
qs = []
for name, q in self.queues.items():
@ -759,9 +757,7 @@ class SQSBackend(BaseBackend):
queue = self.get_queue(queue_name)

if len(message_body) > queue.maximum_message_size:
msg = "One or more parameters are invalid. Reason: Message must be shorter than {} bytes.".format(
msg = f"One or more parameters are invalid. Reason: Message must be shorter than {queue.maximum_message_size} bytes."
queue.maximum_message_size
)
raise InvalidParameterValue(msg)

if delay_seconds:
@ -794,9 +790,9 @@ class SQSBackend(BaseBackend):
else:
if not queue.fifo_queue:
msg = (
"Value {} for parameter MessageGroupId is invalid. "
f"Value {group_id} for parameter MessageGroupId is invalid. "
"Reason: The request include parameter that is not valid for this queue type."
).format(group_id)
)
raise InvalidParameterValue(msg)
message.group_id = group_id

@ -962,10 +958,8 @@ class SQSBackend(BaseBackend):
given_visibility_timeout = unix_time_millis() + visibility_timeout_msec
if given_visibility_timeout - message.sent_timestamp > 43200 * 1000:
raise InvalidParameterValue(
"Value {0} for parameter VisibilityTimeout is invalid. Reason: Total "
f"Value {visibility_timeout} for parameter VisibilityTimeout is invalid. Reason: Total "
"VisibilityTimeout for the message is beyond the limit [43200 seconds]".format(
"VisibilityTimeout for the message is beyond the limit [43200 seconds]"
visibility_timeout
)
)

message.change_visibility(visibility_timeout)
@ -1012,10 +1006,8 @@ class SQSBackend(BaseBackend):
)
if invalid_action:
raise InvalidParameterValue(
"Value SQS:{} for parameter ActionName is invalid. "
f"Value SQS:{invalid_action} for parameter ActionName is invalid. "
"Reason: Only the queue owner is allowed to invoke this action.".format(
"Reason: Only the queue owner is allowed to invoke this action."
invalid_action
)
)

policy = queue._policy_json
@ -1029,14 +1021,11 @@ class SQSBackend(BaseBackend):
)
if statement:
raise InvalidParameterValue(
"Value {} for parameter Label is invalid. "
f"Value {label} for parameter Label is invalid. Reason: Already exists."
"Reason: Already exists.".format(label)
)

principals = [
principals = [f"arn:aws:iam::{account_id}:root" for account_id in account_ids]
"arn:aws:iam::{}:root".format(account_id) for account_id in account_ids
actions = [f"SQS:{action}" for action in actions]
]
actions = ["SQS:{}".format(action) for action in actions]

statement = {
"Sid": label,
@ -1058,8 +1047,8 @@ class SQSBackend(BaseBackend):

if len(statements) == len(statements_new):
raise InvalidParameterValue(
"Value {} for parameter Label is invalid. "
f"Value {label} for parameter Label is invalid. "
"Reason: can't find label on existing policy.".format(label)
"Reason: can't find label on existing policy."
)

queue._policy_json["Statement"] = statements_new
@ -1071,9 +1060,7 @@ class SQSBackend(BaseBackend):
raise MissingParameter("Tags")

if len(tags) > 50:
raise InvalidParameterValue(
raise InvalidParameterValue(f"Too many tags added for queue {queue_name}.")
"Too many tags added for queue {}.".format(queue_name)
)

queue.tags.update(tags)

@ -273,27 +273,25 @@ class SQSResponse(BaseResponse):

message_attributes = parse_message_attributes(
self.querystring,
base="SendMessageBatchRequestEntry.{}.".format(index),
base=f"SendMessageBatchRequestEntry.{index}.",
)

entries[index] = {
"Id": value[0],
"MessageBody": self.querystring.get(
"SendMessageBatchRequestEntry.{}.MessageBody".format(index)
f"SendMessageBatchRequestEntry.{index}.MessageBody"
)[0],
"DelaySeconds": self.querystring.get(
"SendMessageBatchRequestEntry.{}.DelaySeconds".format(index),
f"SendMessageBatchRequestEntry.{index}.DelaySeconds",
[None],
)[0],
"MessageAttributes": message_attributes,
"MessageGroupId": self.querystring.get(
"SendMessageBatchRequestEntry.{}.MessageGroupId".format(index),
f"SendMessageBatchRequestEntry.{index}.MessageGroupId",
[None],
)[0],
"MessageDeduplicationId": self.querystring.get(
"SendMessageBatchRequestEntry.{}.MessageDeduplicationId".format(
f"SendMessageBatchRequestEntry.{index}.MessageDeduplicationId",
index
),
[None],
)[0],
}
@ -329,15 +327,13 @@ class SQSResponse(BaseResponse):

for index in range(1, 11):
# Loop through looking for messages
receipt_key = "DeleteMessageBatchRequestEntry.{0}.ReceiptHandle".format(
receipt_key = f"DeleteMessageBatchRequestEntry.{index}.ReceiptHandle"
index
)
receipt_handle = self.querystring.get(receipt_key)
if not receipt_handle:
# Found all messages
break

message_user_id_key = "DeleteMessageBatchRequestEntry.{0}.Id".format(index)
message_user_id_key = f"DeleteMessageBatchRequestEntry.{index}.Id"
message_user_id = self.querystring.get(message_user_id_key)[0]
receipts.append(
{"receipt_handle": receipt_handle[0], "msg_user_id": message_user_id}
@ -396,9 +392,9 @@ class SQSResponse(BaseResponse):
return self._error(
"InvalidParameterValue",
"An error occurred (InvalidParameterValue) when calling "
"the ReceiveMessage operation: Value %s for parameter "
f"the ReceiveMessage operation: Value {message_count} for parameter "
"MaxNumberOfMessages is invalid. Reason: must be between "
"1 and 10, if provided." % message_count,
"1 and 10, if provided.",
)

try:
@ -410,9 +406,9 @@ class SQSResponse(BaseResponse):
return self._error(
"InvalidParameterValue",
"An error occurred (InvalidParameterValue) when calling "
"the ReceiveMessage operation: Value %s for parameter "
f"the ReceiveMessage operation: Value {wait_time} for parameter "
"WaitTimeSeconds is invalid. Reason: must be <= 0 and "
">= 20 if provided." % wait_time,
">= 20 if provided.",
)

try:
@ -14,7 +14,7 @@ def extract_input_message_attributes(querystring):
index = 1
while True:
# Loop through looking for message attributes
name_key = "MessageAttributeName.{0}".format(index)
name_key = f"MessageAttributeName.{index}"
name = querystring.get(name_key)
if not name:
# Found all attributes
@ -31,19 +31,17 @@ def parse_message_attributes(
index = 1
while True:
# Loop through looking for message attributes
name_key = base + "{0}.{1}.Name".format(key, index)
name_key = base + f"{key}.{index}.Name"
name = querystring.get(name_key)
if not name:
# Found all attributes
break

data_type_key = base + "{0}.{1}.{2}DataType".format(key, index, value_namespace)
data_type_key = base + f"{key}.{index}.{value_namespace}DataType"
data_type = querystring.get(data_type_key)
if not data_type:
raise MessageAttributesInvalid(
"The message attribute '{0}' must contain non-empty message attribute value.".format(
f"The message attribute '{name[0]}' must contain non-empty message attribute value."
name[0]
)
)

data_type_parts = data_type[0].split(".")
@ -53,24 +51,18 @@ def parse_message_attributes(
"Number",
]:
raise MessageAttributesInvalid(
"The message attribute '{0}' has an invalid message attribute type, the set of supported type prefixes is Binary, Number, and String.".format(
f"The message attribute '{name[0]}' has an invalid message attribute type, the set of supported type prefixes is Binary, Number, and String."
name[0]
)
)

type_prefix = "String"
if data_type_parts[0] == "Binary":
type_prefix = "Binary"

value_key = base + "{0}.{1}.{2}{3}Value".format(
value_key = base + f"{key}.{index}.{value_namespace}{type_prefix}Value"
key, index, value_namespace, type_prefix
)
value = querystring.get(value_key)
if not value:
raise MessageAttributesInvalid(
"The message attribute '{0}' must contain non-empty message attribute value for message attribute type '{1}'.".format(
f"The message attribute '{name[0]}' must contain non-empty message attribute value for message attribute type '{data_type[0]}'."
name[0], data_type[0]
)
)

message_attributes[name[0]] = {
@ -212,13 +212,13 @@ class Parameter(CloudFormationModel):
self.value = value

def encrypt(self, value):
return "kms:{}:".format(self.keyid) + value
return f"kms:{self.keyid}:" + value

def decrypt(self, value):
if self.type != "SecureString":
return value

prefix = "kms:{}:".format(self.keyid or "default")
prefix = f"kms:{self.keyid or 'default'}:"
if value.startswith(prefix):
return value[len(prefix) :]

@ -1342,9 +1342,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if not re.match(r"^tag:.+|Name|Type|KeyId|Path|Label|Tier$", key):
self._errors.append(
self._format_error(
key="parameterFilters.{index}.member.key".format(
key=f"parameterFilters.{index + 1}.member.key",
index=(index + 1)
),
value=key,
constraint="Member must satisfy regular expression pattern: tag:.+|Name|Type|KeyId|Path|Label|Tier",
)
@ -1353,9 +1351,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if len(key) > 132:
self._errors.append(
self._format_error(
key="parameterFilters.{index}.member.key".format(
key=f"parameterFilters.{index + 1}.member.key",
index=(index + 1)
),
value=key,
constraint="Member must have length less than or equal to 132",
)
@ -1364,9 +1360,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if len(option) > 10:
self._errors.append(
self._format_error(
key="parameterFilters.{index}.member.option".format(
key=f"parameterFilters.{index + 1}.member.option",
index=(index + 1)
),
value="over 10 chars",
constraint="Member must have length less than or equal to 10",
)
@ -1375,9 +1369,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if len(values) > 50:
self._errors.append(
self._format_error(
key="parameterFilters.{index}.member.values".format(
key=f"parameterFilters.{index + 1}.member.values",
index=(index + 1)
),
value=values,
constraint="Member must have length less than or equal to 50",
)
@ -1386,9 +1378,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if any(len(value) > 1024 for value in values):
self._errors.append(
self._format_error(
key="parameterFilters.{index}.member.values".format(
key=f"parameterFilters.{index + 1}.member.values",
index=(index + 1)
),
value=values,
constraint="[Member must have length less than or equal to 1024, Member must have length greater than or equal to 1]",
)
@ -1413,9 +1403,7 @@ class SimpleSystemManagerBackend(BaseBackend):

if by_path and key in ["Name", "Path", "Tier"]:
raise InvalidFilterKey(
"The following filter key is not valid: {key}. Valid filter keys include: [Type, KeyId].".format(
f"The following filter key is not valid: {key}. Valid filter keys include: [Type, KeyId]."
key=key
)
)

if not values:
@ -1431,9 +1419,7 @@ class SimpleSystemManagerBackend(BaseBackend):
if key == "Path":
if option not in ["Recursive", "OneLevel"]:
raise InvalidFilterOption(
"The following filter option is not valid: {option}. Valid options include: [Recursive, OneLevel].".format(
f"The following filter option is not valid: {option}. Valid options include: [Recursive, OneLevel]."
option=option
)
)
if any(value.lower().startswith(("/aws", "/ssm")) for value in values):
raise ValidationException(
@ -1463,18 +1449,14 @@ class SimpleSystemManagerBackend(BaseBackend):
for value in values:
if value not in ["Standard", "Advanced", "Intelligent-Tiering"]:
raise InvalidFilterOption(
"The following filter value is not valid: {value}. Valid values include: [Standard, Advanced, Intelligent-Tiering].".format(
f"The following filter value is not valid: {value}. Valid values include: [Standard, Advanced, Intelligent-Tiering]."
value=value
)
)

if key == "Type":
for value in values:
if value not in ["String", "StringList", "SecureString"]:
raise InvalidFilterOption(
"The following filter value is not valid: {value}. Valid values include: [String, StringList, SecureString].".format(
f"The following filter value is not valid: {value}. Valid values include: [String, StringList, SecureString]."
value=value
)
)

allowed_options = ["Equals", "BeginsWith"]
@ -1482,17 +1464,13 @@ class SimpleSystemManagerBackend(BaseBackend):
allowed_options += ["Contains"]
if key != "Path" and option not in allowed_options:
raise InvalidFilterOption(
"The following filter option is not valid: {option}. Valid options include: [BeginsWith, Equals].".format(
f"The following filter option is not valid: {option}. Valid options include: [BeginsWith, Equals]."
option=option
)
)

filter_keys.append(key)

def _format_error(self, key, value, constraint):
return 'Value "{value}" at "{key}" failed to satisfy constraint: {constraint}'.format(
return f'Value "{value}" at "{key}" failed to satisfy constraint: {constraint}'
constraint=constraint, key=key, value=value
)

def _raise_errors(self):
if self._errors:
@ -1502,9 +1480,7 @@ class SimpleSystemManagerBackend(BaseBackend):
self._errors = [] # reset collected errors

raise ValidationException(
"{count} validation error{plural} detected: {errors}".format(
f"{count} validation error{plural} detected: {errors}"
count=count, plural=plural, errors=errors
)
)

def get_all_parameters(self):
@ -1517,12 +1493,10 @@ class SimpleSystemManagerBackend(BaseBackend):
result = {}

if len(names) > 10:
all_names = ", ".join(names)
raise ValidationException(
"1 validation error detected: "
"Value '[{}]' at 'names' failed to satisfy constraint: "
f"Value '[{all_names}]' at 'names' failed to satisfy constraint: Member must have length less than or equal to 10."
"Member must have length less than or equal to 10.".format(
", ".join(names)
)
)

for name in set(names):
@ -1577,10 +1551,8 @@ class SimpleSystemManagerBackend(BaseBackend):
if max_results > PARAMETER_HISTORY_MAX_RESULTS:
raise ValidationException(
"1 validation error detected: "
"Value '{}' at 'maxResults' failed to satisfy constraint: "
f"Value '{max_results}' at 'maxResults' failed to satisfy constraint: "
"Member must have value less than or equal to {}.".format(
f"Member must have value less than or equal to {PARAMETER_HISTORY_MAX_RESULTS}."
max_results, PARAMETER_HISTORY_MAX_RESULTS
)
)

if name in self._parameters:
@ -1710,7 +1682,7 @@ class SimpleSystemManagerBackend(BaseBackend):
def label_parameter_version(self, name, version, labels):
previous_parameter_versions = self._parameters[name]
if not previous_parameter_versions:
raise ParameterNotFound("Parameter %s not found." % name)
raise ParameterNotFound(f"Parameter {name} not found.")
found_parameter = None
labels_needing_removal = []
if not version:
@ -1727,8 +1699,7 @@ class SimpleSystemManagerBackend(BaseBackend):
labels_needing_removal.append(label)
if not found_parameter:
raise ParameterVersionNotFound(
"Systems Manager could not find version %s of %s. "
f"Systems Manager could not find version {version} of {name}. Verify the version and try again."
"Verify the version and try again." % (version, name)
)
labels_to_append = []
invalid_labels = []
@ -1772,10 +1743,10 @@ class SimpleSystemManagerBackend(BaseBackend):
oldest_parameter = parameter_versions[0]
if oldest_parameter.labels:
raise ParameterMaxVersionLimitExceeded(
"You attempted to create a new version of %s by calling the PutParameter API "
f"You attempted to create a new version of {name} by calling the PutParameter API "
"with the overwrite flag. Version %d, the oldest version, can't be deleted "
f"with the overwrite flag. Version {oldest_parameter.version}, the oldest version, can't be deleted "
"because it has a label associated with it. Move the label to another version "
"of the parameter, and try again." % (name, oldest_parameter.version)
"of the parameter, and try again."
)

def put_parameter(
@ -1807,7 +1778,7 @@ class SimpleSystemManagerBackend(BaseBackend):
is_path = name.count("/") > 1
if name.lower().startswith("/aws") and is_path:
raise AccessDeniedException(
"No access to reserved parameter name: {name}.".format(name=name)
f"No access to reserved parameter name: {name}."
)
if not is_path:
invalid_prefix_error = 'Parameter name: can\'t be prefixed with "aws" or "ssm" (case-insensitive).'
@ -160,7 +160,7 @@ class SimpleSystemManagerResponse(BaseResponse):
if result is None:
error = {
"__type": "ParameterNotFound",
"message": "Parameter {0} not found.".format(name),
"message": f"Parameter {name} not found.",
}
return json.dumps(error), dict(status=400)
return json.dumps({})
@ -195,7 +195,7 @@ class SimpleSystemManagerResponse(BaseResponse):
if result is None:
error = {
"__type": "ParameterNotFound",
"message": "Parameter {0} not found.".format(name),
"message": f"Parameter {name} not found.",
}
return json.dumps(error), dict(status=400)

@ -296,7 +296,7 @@ class SimpleSystemManagerResponse(BaseResponse):
if result is None:
error = {
"__type": "ParameterAlreadyExists",
"message": "Parameter {0} already exists.".format(name),
"message": f"Parameter {name} already exists.",
}
return json.dumps(error), dict(status=400)

@ -316,7 +316,7 @@ class SimpleSystemManagerResponse(BaseResponse):
if result is None:
error = {
"__type": "ParameterNotFound",
"message": "Parameter {0} not found.".format(name),
"message": f"Parameter {name} not found.",
}
return json.dumps(error), dict(status=400)

@ -36,7 +36,7 @@ class InvalidToken(AWSError):
STATUS = 400

def __init__(self, message="Invalid token"):
super().__init__("Invalid Token: {}".format(message))
super().__init__(f"Invalid Token: {message}")


class ResourceNotFound(AWSError):
@ -44,4 +44,4 @@ class ResourceNotFound(AWSError):
STATUS = 400

def __init__(self, arn):
super().__init__("Resource not found: '{}'".format(arn))
super().__init__(f"Resource not found: '{arn}'")
@ -63,13 +63,7 @@ class AssumedRole(BaseModel):

@property
def arn(self):
return (
return f"arn:aws:sts::{self.account_id}:assumed-role/{self.role_arn.split('/')[-1]}/{self.session_name}"
"arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=self.account_id,
role_name=self.role_arn.split("/")[-1],
session_name=self.session_name,
)
)


class STSBackend(BaseBackend):
@ -28,7 +28,7 @@ class TokenResponse(BaseResponse):
"1 validation error detected: Value "
'\'{"Version": "2012-10-17", "Statement": [...]}\' '
"at 'policy' failed to satisfy constraint: Member must have length less than or "
" equal to %s" % MAX_FEDERATION_TOKEN_POLICY_LENGTH
f" equal to {MAX_FEDERATION_TOKEN_POLICY_LENGTH}"
)

name = self.querystring.get("Name")[0]
@ -166,7 +166,7 @@ class SupportBackend(BaseBackend):
random_case_id = "".join(
random.choice("0123456789ABCDEFGHIJKLMabcdefghijklm") for i in range(16)
)
case_id = "case-12345678910-2020-%s" % random_case_id
case_id = f"case-12345678910-2020-{random_case_id}"
case = SupportCase(
case_id=case_id,
subject=subject,
@ -8,9 +8,9 @@ class SWFClientError(JsonRESTError):
class SWFUnknownResourceFault(SWFClientError):
def __init__(self, resource_type, resource_name=None):
if resource_name:
message = "Unknown {0}: {1}".format(resource_type, resource_name)
message = f"Unknown {resource_type}: {resource_name}"
else:
message = "Unknown {0}".format(resource_type)
message = f"Unknown {resource_type}"
super().__init__("com.amazonaws.swf.base.model#UnknownResourceFault", message)


@ -31,7 +31,7 @@ class SWFDomainDeprecatedFault(SWFClientError):
class SWFSerializationException(SWFClientError):
def __init__(self, value):
message = "class java.lang.Foo can not be converted to an String "
message += " (not a real SWF exception ; happened on: {0})".format(value)
message += f" (not a real SWF exception ; happened on: {value})"
__type = "com.amazonaws.swf.base.model#SerializationException"
super().__init__(__type, message)

@ -40,9 +40,7 @@ class SWFTypeAlreadyExistsFault(SWFClientError):
def __init__(self, _type):
super().__init__(
"com.amazonaws.swf.base.model#TypeAlreadyExistsFault",
"{0}=[name={1}, version={2}]".format(
f"{_type.__class__.__name__}=[name={_type.name}, version={_type.version}]",
_type.__class__.__name__, _type.name, _type.version
),
)


@ -50,9 +48,7 @@ class SWFTypeDeprecatedFault(SWFClientError):
def __init__(self, _type):
super().__init__(
"com.amazonaws.swf.base.model#TypeDeprecatedFault",
"{0}=[name={1}, version={2}]".format(
f"{_type.__class__.__name__}=[name={_type.name}, version={_type.version}]",
_type.__class__.__name__, _type.name, _type.version
),
)


@ -88,19 +84,15 @@ class SWFDecisionValidationException(SWFClientError):
for pb in problems:
if pb["type"] == "null_value":
messages.append(
"Value null at '%(where)s' failed to satisfy constraint: "
f"Value null at '{pb['where']}' failed to satisfy constraint: Member must not be null"
"Member must not be null" % pb
)
elif pb["type"] == "bad_decision_type":
messages.append(
"Value '%(value)s' at '%(where)s' failed to satisfy constraint: "
f"Value '{pb['value']}' at '{pb['where']}' failed to satisfy constraint: "
"Member must satisfy enum value set: "
f"Member must satisfy enum value set: [{pb['possible_values']}]"
"[%(possible_values)s]" % pb
)
else:
raise ValueError(
raise ValueError(f"Unhandled decision constraint type: {pb['type']}")
"Unhandled decision constraint type: {0}".format(pb["type"])
)
# prefix
count = len(problems)
if count < 2:
@ -168,7 +168,7 @@ class SWFBackend(BaseBackend):
workflow_version,
tag_list=None,
workflow_input=None,
**kwargs
**kwargs,
):
domain = self._get_domain(domain_name)
wf_type = domain.get_type("workflow", workflow_name, workflow_version)
@ -180,7 +180,7 @@ class SWFBackend(BaseBackend):
workflow_id,
tag_list=tag_list,
workflow_input=workflow_input,
**kwargs
**kwargs,
)
domain.add_workflow_execution(wfe)
wfe.start()
@ -284,17 +284,13 @@ class SWFBackend(BaseBackend):
if not wfe.open:
raise SWFUnknownResourceFault(
"execution",
"WorkflowExecution=[workflowId={0}, runId={1}]".format(
f"WorkflowExecution=[workflowId={wfe.workflow_id}, runId={wfe.run_id}]",
wfe.workflow_id, wfe.run_id
),
)
# decision task found, but already completed
if decision_task.state != "STARTED":
if decision_task.state == "COMPLETED":
raise SWFUnknownResourceFault(
"decision task, scheduledEventId = {0}".format(
f"decision task, scheduledEventId = {decision_task.scheduled_event_id}"
decision_task.scheduled_event_id
)
)
else:
raise ValueError(
@ -375,17 +371,13 @@ class SWFBackend(BaseBackend):
if not wfe.open:
raise SWFUnknownResourceFault(
"execution",
"WorkflowExecution=[workflowId={0}, runId={1}]".format(
f"WorkflowExecution=[workflowId={wfe.workflow_id}, runId={wfe.run_id}]",
wfe.workflow_id, wfe.run_id
),
)
# activity task found, but already completed
if activity_task.state != "STARTED":
if activity_task.state == "COMPLETED":
raise SWFUnknownResourceFault(
"activity, scheduledEventId = {0}".format(
f"activity, scheduledEventId = {activity_task.scheduled_event_id}"
activity_task.scheduled_event_id
)
)
else:
raise ValueError(
@ -26,7 +26,7 @@ class Domain(BaseModel):
self.decision_task_lists = {}

def __repr__(self):
return "Domain(name: %(name)s, status: %(status)s)" % self.__dict__
return f"Domain(name: {self.name}, status: {self.status})"

def to_short_dict(self):
hsh = {"name": self.name, "status": self.status}
@ -50,9 +50,7 @@ class Domain(BaseModel):
if not ignore_empty:
raise SWFUnknownResourceFault(
"type",
"{0}Type=[name={1}, version={2}]".format(
f"{kind.capitalize()}Type=[name={name}, version={version}]",
kind.capitalize(), name, version
),
)

def add_type(self, _type):
@ -97,12 +95,10 @@ class Domain(BaseModel):
if run_id:
args = [
"execution",
"WorkflowExecution=[workflowId={0}, runId={1}]".format(
f"WorkflowExecution=[workflowId={workflow_id}, runId={run_id}]",
workflow_id, run_id
),
]
else:
args = ["execution, workflowId = {0}".format(workflow_id)]
args = [f"execution, workflowId = {workflow_id}"]
raise SWFUnknownResourceFault(*args)
# at last return workflow execution
return wfe
@ -21,10 +21,8 @@ class GenericType(BaseModel):

def __repr__(self):
cls = self.__class__.__name__
attrs = (
attrs = f"name: {self.name}, version: {self.version}, status: {self.status}"
"name: %(name)s, version: %(version)s, status: %(status)s" % self.__dict__
return f"{cls}({attrs})"
)
return "{0}({1})".format(cls, attrs)

@property
def kind(self):
@ -39,7 +37,7 @@ class GenericType(BaseModel):

def to_medium_dict(self):
hsh = {
"{0}Type".format(self.kind): self.to_short_dict(),
f"{self.kind}Type": self.to_short_dict(),
"creationDate": 1420066800,
"status": self.status,
}
@ -39,9 +39,7 @@ class HistoryEvent(BaseModel):
def __init__(self, event_id, event_type, event_timestamp=None, **kwargs):
if event_type not in SUPPORTED_HISTORY_EVENT_TYPES:
raise NotImplementedError(
"HistoryEvent does not implement attributes for type '{0}'".format(
f"HistoryEvent does not implement attributes for type '{event_type}'"
event_type
)
)
self.event_id = event_id
self.event_type = event_type
@ -71,5 +69,5 @@ class HistoryEvent(BaseModel):
}

def _attributes_key(self):
key = "{0}EventAttributes".format(self.event_type)
key = f"{self.event_type}EventAttributes"
return decapitalize(key)
@ -89,7 +89,7 @@ class WorkflowExecution(BaseModel):
self._timers = {}

def __repr__(self):
return "WorkflowExecution(run_id: {0})".format(self.run_id)
return f"WorkflowExecution(run_id: {self.run_id})"

def _set_from_kwargs_or_workflow_type(
self, kwargs, local_key, workflow_type_key=None
@ -306,7 +306,7 @@ class WorkflowExecution(BaseModel):
for dt in self.decision_tasks:
if dt.task_token == task_token:
return dt
raise ValueError("No decision task with token: {0}".format(task_token))
raise ValueError(f"No decision task with token: {task_token}")

def start_decision_task(self, task_token, identity=None):
dt = self._find_decision_task(task_token)
@ -349,9 +349,7 @@ class WorkflowExecution(BaseModel):
problems.append(
{
"type": "null_value",
"where": "decisions.{0}.member.{1}.{2}".format(
"where": f"decisions.{decision_id}.member.{kind}.{key}",
decision_id, kind, key
),
}
)
return problems
@ -385,9 +383,7 @@ class WorkflowExecution(BaseModel):
attrs_to_check = [d for d in dcs.keys() if d.endswith("DecisionAttributes")]
if dcs["decisionType"] in self.KNOWN_DECISION_TYPES:
decision_type = dcs["decisionType"]
decision_attr = "{0}DecisionAttributes".format(
decision_attr = f"{decapitalize(decision_type)}DecisionAttributes"
decapitalize(decision_type)
)
attrs_to_check.append(decision_attr)
for attr in attrs_to_check:
problems += self._check_decision_attributes(
@ -399,9 +395,7 @@ class WorkflowExecution(BaseModel):
{
"type": "bad_decision_type",
"value": dcs["decisionType"],
"where": "decisions.{0}.member.decisionType".format(
"where": f"decisions.{decision_number}.member.decisionType",
decision_number
),
"possible_values": ", ".join(self.KNOWN_DECISION_TYPES),
}
)
@ -418,7 +412,7 @@ class WorkflowExecution(BaseModel):
# handle each decision separately, in order
for decision in decisions:
decision_type = decision["decisionType"]
attributes_key = "{0}DecisionAttributes".format(decapitalize(decision_type))
attributes_key = f"{decapitalize(decision_type)}DecisionAttributes"
attributes = decision.get(attributes_key, {})
if decision_type == "CompleteWorkflowExecution":
self.complete(event_id, attributes.get("result"))
@ -441,9 +435,7 @@ class WorkflowExecution(BaseModel):
# TODO: implement Decision type: ScheduleLambdaFunction
# TODO: implement Decision type: SignalExternalWorkflowExecution
# TODO: implement Decision type: StartChildWorkflowExecution
raise NotImplementedError(
raise NotImplementedError(f"Cannot handle decision: {decision_type}")
"Cannot handle decision: {0}".format(decision_type)
)

# finally decrement counter if and only if everything went well
self.open_counts["openDecisionTasks"] -= 1
@ -553,7 +545,7 @@ class WorkflowExecution(BaseModel):
if not timeouts[_type]:
error_key = default_key.replace("default_task_", "default_")
fail_schedule_activity_task(
activity_type, "{0}_UNDEFINED".format(error_key.upper())
activity_type, f"{error_key.upper()}_UNDEFINED"
)
return

@ -588,7 +580,7 @@ class WorkflowExecution(BaseModel):
for task in self.activity_tasks:
if task.task_token == task_token:
return task
raise ValueError("No activity task with token: {0}".format(task_token))
raise ValueError(f"No activity task with token: {task_token}")

def start_activity_task(self, task_token, identity=None):
task = self._find_activity_task(task_token)
@ -52,12 +52,9 @@ class SWFResponse(BaseResponse):
return
keys = kwargs.keys()
if len(keys) == 2:
message = "Cannot specify both a {0} and a {1}".format(keys[0], keys[1])
message = f"Cannot specify both a {keys[0]} and a {keys[1]}"
else:
message = (
message = f"Cannot specify more than one exclusive filters in the same query: {keys}"
"Cannot specify more than one exclusive filters in the"
" same query: {0}".format(keys)
)
raise SWFValidationException(message)

def _list_types(self, kind):
@ -73,7 +70,7 @@ class SWFResponse(BaseResponse):

def _describe_type(self, kind):
domain = self._params["domain"]
_type_args = self._params["{0}Type".format(kind)]
_type_args = self._params[f"{kind}Type"]
name = _type_args["name"]
version = _type_args["version"]
self._check_string(domain)
@ -85,7 +82,7 @@ class SWFResponse(BaseResponse):

def _deprecate_type(self, kind):
domain = self._params["domain"]
_type_args = self._params["{0}Type".format(kind)]
_type_args = self._params[f"{kind}Type"]
name = _type_args["name"]
version = _type_args["version"]
self._check_string(domain)
@ -96,7 +93,7 @@ class SWFResponse(BaseResponse):

def _undeprecate_type(self, kind):
domain = self._params["domain"]
_type_args = self._params["{0}Type".format(kind)]
_type_args = self._params[f"{kind}Type"]
name = _type_args["name"]
version = _type_args["version"]
self._check_string(domain)
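For reference, the mechanical pattern applied throughout this commit is plain str.format-to-f-string conversion. The snippet below is a minimal illustrative sketch, not taken from the diff itself; the variable names and values are hypothetical.

# Illustrative values only (not from the moto codebase)
queue_name = "my-queue"
account_id = "123456789012"
policy = {"deadLetterTargetArn": "arn:aws:sqs:us-east-1:123456789012:dlq"}

# Before: positional str.format, with the arguments trailing the template
msg_old = "Could not find queue {0} in account {1}".format(queue_name, account_id)

# After: an f-string interpolates the same expressions inline
msg_new = f"Could not find queue {queue_name} in account {account_id}"
assert msg_old == msg_new

# Quoting caveat seen in several hunks: when the interpolated expression itself
# contains quotes (e.g. a dict lookup), the f-string must use the other quote style
dlq_msg = f"Could not find DLQ for {policy['deadLetterTargetArn']}"

Both forms build identical strings; the f-string simply drops the separate .format() call and keeps each value next to its placeholder, which is why the converted hunks usually collapse several wrapped lines into one.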