Techdebt: Replace string-format with f-strings (for tests dirs) (#5678)
parent 68cf3e8a1d
commit 1a8ddc0f2b
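The change is mechanical throughout: %-interpolation and str.format() calls in the test directories are rewritten as equivalent f-strings, with no behavioural change intended. A minimal sketch of the equivalence the diff relies on, using hypothetical values rather than anything taken from the hunks below:

# Illustrative only; i and arn are hypothetical stand-ins.
i = 7
arn = "arn:aws:acm:us-east-1:123456789012:certificate/abc"

old_percent = "a-%d" % i                                 # %-interpolation
old_format = "Instance [{0}] is invalid.".format(arn)    # str.format()

new_percent = f"a-{i}"                                   # f-string equivalents
new_format = f"Instance [{arn}] is invalid."

assert old_percent == new_percent
assert old_format == new_format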
@@ -1 +1 @@
-Subproject commit f9a6db6e3c3f3299701747972fd6c37ba4af36f4
+Subproject commit 01a50d07400ee7513b31ec10e9832a2d8290b4e2

@@ -511,7 +511,7 @@ def test_add_too_many_tags():
 with pytest.raises(ClientError) as ex:
 client.add_tags_to_certificate(
 CertificateArn=arn,
-Tags=[{"Key": "a-%d" % i, "Value": "abcd"} for i in range(1, 52)],
+Tags=[{"Key": f"a-{i}", "Value": "abcd"} for i in range(1, 52)],
 )
 ex.value.response["Error"]["Code"].should.equal("TooManyTagsException")
 ex.value.response["Error"]["Message"].should.contain("contains too many Tags")

@@ -520,7 +520,7 @@ def test_add_too_many_tags():
 # Add 49 tags first, then try to add 2 more.
 client.add_tags_to_certificate(
 CertificateArn=arn,
-Tags=[{"Key": "p-%d" % i, "Value": "pqrs"} for i in range(1, 50)],
+Tags=[{"Key": f"p-{i}", "Value": "pqrs"} for i in range(1, 50)],
 )
 client.list_tags_for_certificate(CertificateArn=arn)["Tags"].should.have.length_of(
 49

@@ -931,12 +931,8 @@ def test_create_authorizer():
 response["ResponseMetadata"].pop("HTTPHeaders", None)
 response["ResponseMetadata"].pop("RetryAttempts", None)

-response["items"][0]["id"].should.match(
-r"{0}|{1}".format(authorizer_id2, authorizer_id)
-)
-response["items"][1]["id"].should.match(
-r"{0}|{1}".format(authorizer_id2, authorizer_id)
-)
+response["items"][0]["id"].should.match(rf"{authorizer_id2}|{authorizer_id}")
+response["items"][1]["id"].should.match(rf"{authorizer_id2}|{authorizer_id}")

 new_authorizer_name_with_vars = "authorizer_with_vars"
 response = client.create_authorizer(
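The regex assertions above swap str.format() on a raw string for a raw f-string; the resulting pattern is identical, since the r prefix only disables backslash escaping while {} interpolation still applies. A small check of that equivalence, with hypothetical IDs in place of the values the test actually gets back from create_authorizer:

import re

# Hypothetical stand-ins for the IDs returned by create_authorizer.
authorizer_id = "abc123"
authorizer_id2 = "def456"

old_pattern = r"{0}|{1}".format(authorizer_id2, authorizer_id)
new_pattern = rf"{authorizer_id2}|{authorizer_id}"

assert old_pattern == new_pattern == "def456|abc123"
assert re.match(new_pattern, authorizer_id)  # the alternation matches either ID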
@@ -1186,9 +1182,7 @@ def test_put_integration_validation():
 client.put_integration(
 restApiId=api_id,
 resourceId=root_id,
-credentials="arn:aws:iam::{}:role/service-role/testfunction-role-oe783psq".format(
-ACCOUNT_ID
-),
+credentials=f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/testfunction-role-oe783psq",
 httpMethod="GET",
 type=_type,
 uri="arn:aws:apigateway:us-west-2:s3:path/b/k",

@@ -1210,9 +1204,7 @@ def test_put_integration_validation():
 client.put_integration(
 restApiId=api_id,
 resourceId=root_id,
-credentials="arn:aws:iam::{}:role/service-role/testfunction-role-oe783psq".format(
-ACCOUNT_ID
-),
+credentials=f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/testfunction-role-oe783psq",
 httpMethod="GET",
 type=_type,
 uri="arn:aws:apigateway:us-west-2:s3:path/b/k",

@@ -367,7 +367,7 @@ def test_simple_apigateway_with_lambda_proxy():
 statement = policy["Statement"][0]
 statement["FunctionName"].should.contain(fn_name)
 statement["Condition"]["ArnLike"]["AWS:SourceArn"].should.equal(
-"arn:aws:execute-api:us-east-1:123456789012:{}/*/*".format(api_id)
+f"arn:aws:execute-api:us-east-1:123456789012:{api_id}/*/*"
 )


@@ -143,12 +143,8 @@ def test_create_multiple_deployments():

 response = client.get_deployments(restApiId=api_id)

-response["items"][0]["id"].should.match(
-r"{0}|{1}".format(deployment_id2, deployment_id)
-)
-response["items"][1]["id"].should.match(
-r"{0}|{1}".format(deployment_id2, deployment_id)
-)
+response["items"][0]["id"].should.match(rf"{deployment_id2}|{deployment_id}")
+response["items"][1]["id"].should.match(rf"{deployment_id2}|{deployment_id}")


 @mock_apigateway

@@ -34,16 +34,16 @@ def test_usage_plans_apis():
 json.loads(res.data)["item"].should.have.length_of(1)

 # Get single usage plan
-res = test_client.get("/usageplans/{0}".format(created_plan["id"]))
+res = test_client.get(f"/usageplans/{created_plan['id']}")
 fetched_plan = json.loads(res.data)
 fetched_plan.should.equal(created_plan)

 # Not existing usage plan
-res = test_client.get("/usageplans/{0}".format("not_existing"))
+res = test_client.get("/usageplans/not_existing")
 res.status_code.should.equal(404)

 # Delete usage plan
-res = test_client.delete("/usageplans/{0}".format(created_plan["id"]))
+res = test_client.delete(f"/usageplans/{created_plan['id']}")
 res.data.should.equal(b"{}")

 # List usage plans (expect empty again)

@@ -61,53 +61,45 @@ def test_usage_plans_keys():
 created_api_key = json.loads(res.data)

 # List usage plans keys (expect empty)
-res = test_client.get("/usageplans/{0}/keys".format(usage_plan_id))
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys")
 json.loads(res.data)["item"].should.have.length_of(0)

 # Invalid api key (does not exists at all)
-res = test_client.get(
-"/usageplans/{0}/keys/{1}".format(usage_plan_id, "not_existing")
-)
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys/not_existing")
 res.status_code.should.equal(404)

 # not existing usage plan with existing api key
-res = test_client.get(
-"/usageplans/{0}/keys/{1}".format("not_existing", created_api_key["id"])
-)
+res = test_client.get(f"/usageplans/not_existing/keys/{created_api_key['id']}")
 res.status_code.should.equal(404)

 # not jet added api key
-res = test_client.get(
-"/usageplans/{0}/keys/{1}".format(usage_plan_id, created_api_key["id"])
-)
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys/{created_api_key['id']}")
 res.status_code.should.equal(404)

 # Create usage plan key
 res = test_client.post(
-"/usageplans/{0}/keys".format(usage_plan_id),
+f"/usageplans/{usage_plan_id}/keys",
 data=json.dumps({"keyId": created_api_key["id"], "keyType": "API_KEY"}),
 )
 created_usage_plan_key = json.loads(res.data)

 # List usage plans keys (expect 1 key)
-res = test_client.get("/usageplans/{0}/keys".format(usage_plan_id))
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys")
 json.loads(res.data)["item"].should.have.length_of(1)

 # Get single usage plan key
-res = test_client.get(
-"/usageplans/{0}/keys/{1}".format(usage_plan_id, created_api_key["id"])
-)
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys/{created_api_key['id']}")
 fetched_plan_key = json.loads(res.data)
 fetched_plan_key.should.equal(created_usage_plan_key)

 # Delete usage plan key
 res = test_client.delete(
-"/usageplans/{0}/keys/{1}".format(usage_plan_id, created_api_key["id"])
+f"/usageplans/{usage_plan_id}/keys/{created_api_key['id']}"
 )
 res.data.should.equal(b"{}")

 # List usage plans keys (expect to be empty again)
-res = test_client.get("/usageplans/{0}/keys".format(usage_plan_id))
+res = test_client.get(f"/usageplans/{usage_plan_id}/keys")
 json.loads(res.data)["item"].should.have.length_of(0)


@@ -118,7 +110,7 @@ def test_create_usage_plans_key_non_existent_api_key():

 # Create usage plan key with non-existent api key
 res = test_client.post(
-"/usageplans/{0}/keys".format(usage_plan_id),
+f"/usageplans/{usage_plan_id}/keys",
 data=json.dumps({"keyId": "non-existent", "keyType": "API_KEY"}),
 )
 res.status_code.should.equal(404)
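One detail worth noting in the hunks above: when the interpolated expression itself contains a string subscript, the f-string has to use a different quote style inside the braces than outside (a hard requirement before Python 3.12), hence created_plan['id'] in single quotes. A small sketch with a hypothetical response dict:

# created_plan is a hypothetical stand-in for the JSON response used in the tests above.
created_plan = {"id": "plan123"}

old_url = "/usageplans/{0}".format(created_plan["id"])
new_url = f"/usageplans/{created_plan['id']}"  # inner quotes differ from the outer ones

assert old_url == new_url == "/usageplans/plan123"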
@@ -9,7 +9,7 @@ DEFAULT_ECS_CLUSTER = "default"
 DEFAULT_ECS_TASK = "test_ecs_task"
 DEFAULT_ECS_SERVICE = "sample-webapp"
 DEFAULT_SERVICE_NAMESPACE = "ecs"
-DEFAULT_RESOURCE_ID = "service/{}/{}".format(DEFAULT_ECS_CLUSTER, DEFAULT_ECS_SERVICE)
+DEFAULT_RESOURCE_ID = f"service/{DEFAULT_ECS_CLUSTER}/{DEFAULT_ECS_SERVICE}"
 DEFAULT_SCALABLE_DIMENSION = "ecs:service:DesiredCount"
 DEFAULT_MIN_CAPACITY = 1
 DEFAULT_MAX_CAPACITY = 1

@@ -122,12 +122,12 @@ def test_describe_scalable_targets_only_return_ecs_targets():
 register_scalable_target(
 client,
 ServiceNamespace="ecs",
-ResourceId="service/{}/test1".format(DEFAULT_ECS_CLUSTER),
+ResourceId=f"service/{DEFAULT_ECS_CLUSTER}/test1",
 )
 register_scalable_target(
 client,
 ServiceNamespace="ecs",
-ResourceId="service/{}/test2".format(DEFAULT_ECS_CLUSTER),
+ResourceId=f"service/{DEFAULT_ECS_CLUSTER}/test2",
 )
 register_scalable_target(
 client,

@@ -158,7 +158,7 @@ def test_describe_scalable_targets_next_token_success():
 register_scalable_target(
 client,
 ServiceNamespace="ecs",
-ResourceId="service/{}/{}".format(DEFAULT_ECS_CLUSTER, i),
+ResourceId=f"service/{DEFAULT_ECS_CLUSTER}/{i}",
 )
 response = client.describe_scalable_targets(
 ServiceNamespace=DEFAULT_SERVICE_NAMESPACE

@@ -379,9 +379,7 @@ def test_put_scaling_policy(policy_type, policy_body_kwargs):
 )
 response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
 response["PolicyARN"].should.match(
-r"arn:aws:autoscaling:.*1:scalingPolicy:.*:resource/{}/{}:policyName/{}".format(
-namespace, resource_id, policy_name
-)
+rf"arn:aws:autoscaling:.*1:scalingPolicy:.*:resource/{namespace}/{resource_id}:policyName/{policy_name}"
 )


@@ -434,9 +432,7 @@ def test_describe_scaling_policies():
 policy["PolicyType"].should.equal(policy_type)
 policy["TargetTrackingScalingPolicyConfiguration"].should.equal(policy_body)
 policy["PolicyARN"].should.match(
-r"arn:aws:autoscaling:.*1:scalingPolicy:.*:resource/{}/{}:policyName/{}".format(
-namespace, resource_id, policy_name
-)
+rf"arn:aws:autoscaling:.*1:scalingPolicy:.*:resource/{namespace}/{resource_id}:policyName/{policy_name}"
 )
 policy.should.have.key("CreationTime").which.should.be.a("datetime.datetime")

@@ -12,7 +12,7 @@ DEFAULT_ECS_CLUSTER = "default"
 DEFAULT_ECS_TASK = "test_ecs_task"
 DEFAULT_ECS_SERVICE = "sample-webapp"
 DEFAULT_SERVICE_NAMESPACE = "ecs"
-DEFAULT_RESOURCE_ID = "service/{}/{}".format(DEFAULT_ECS_CLUSTER, DEFAULT_ECS_SERVICE)
+DEFAULT_RESOURCE_ID = f"service/{DEFAULT_ECS_CLUSTER}/{DEFAULT_ECS_SERVICE}"
 DEFAULT_SCALABLE_DIMENSION = "ecs:service:DesiredCount"
 DEFAULT_MIN_CAPACITY = 1
 DEFAULT_MAX_CAPACITY = 1

@@ -71,7 +71,7 @@ def test_describe_scalable_targets_with_multiple_invalid_parameters_should_retur
 @mock_applicationautoscaling
 def test_register_scalable_target_ecs_with_non_existent_service_should_return_clusternotfound_exception():
 client = boto3.client("application-autoscaling", region_name=DEFAULT_REGION)
-resource_id = "service/{}/foo".format(DEFAULT_ECS_CLUSTER)
+resource_id = f"service/{DEFAULT_ECS_CLUSTER}/foo"

 with pytest.raises(ClientError) as ex:
 register_scalable_target(client, ServiceNamespace="ecs", ResourceId=resource_id)

@@ -99,7 +99,7 @@ def test_create_autoscaling_group_from_invalid_instance_id():
 ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
 ex.value.response["Error"]["Code"].should.equal("ValidationError")
 ex.value.response["Error"]["Message"].should.equal(
-"Instance [{0}] is invalid.".format(invalid_instance_id)
+f"Instance [{invalid_instance_id}] is invalid."
 )


@@ -518,9 +518,7 @@ def test_update_autoscaling_group_launch_config():
 AutoScalingGroupName="test_asg",
 LaunchConfigurationName="test_launch_configuration_new",
 MinSize=1,
-VPCZoneIdentifier="{subnet1},{subnet2}".format(
-subnet1=mocked_networking["subnet1"], subnet2=mocked_networking["subnet2"]
-),
+VPCZoneIdentifier=f"{mocked_networking['subnet1']},{mocked_networking['subnet2']}",
 NewInstancesProtectedFromScaleIn=False,
 )

@@ -566,9 +564,7 @@ def test_update_autoscaling_group_launch_template():
 "Version": "1",
 },
 MinSize=1,
-VPCZoneIdentifier="{subnet1},{subnet2}".format(
-subnet1=mocked_networking["subnet1"], subnet2=mocked_networking["subnet2"]
-),
+VPCZoneIdentifier=f"{mocked_networking['subnet1']},{mocked_networking['subnet2']}",
 NewInstancesProtectedFromScaleIn=False,
 )

@@ -61,7 +61,7 @@ class TestAutoScalingGroup(TestCase):
 def test_list_many_autoscaling_groups(self):

 for i in range(51):
-self._create_group("TestGroup%d" % i)
+self._create_group(f"TestGroup{i}")

 response = self.as_client.describe_auto_scaling_groups()
 groups = response["AutoScalingGroups"]

@@ -203,10 +203,7 @@ class TestAutoScalingELB(TestCase):
 }
 ],
 TerminationPolicies=["OldestInstance", "NewestInstance"],
-VPCZoneIdentifier="{subnet1},{subnet2}".format(
-subnet1=self.mocked_networking["subnet1"],
-subnet2=self.mocked_networking["subnet2"],
-),
+VPCZoneIdentifier=f"{self.mocked_networking['subnet1']},{self.mocked_networking['subnet2']}",
 )

 self.as_client.put_scheduled_update_group_action(

@@ -231,10 +228,7 @@ class TestAutoScalingELB(TestCase):
 group["MinSize"].should.equal(INSTANCE_COUNT_GROUP)
 group["Instances"].should.have.length_of(INSTANCE_COUNT_GROUP)
 group["VPCZoneIdentifier"].should.equal(
-"{subnet1},{subnet2}".format(
-subnet1=self.mocked_networking["subnet1"],
-subnet2=self.mocked_networking["subnet2"],
-)
+f"{self.mocked_networking['subnet1']},{self.mocked_networking['subnet2']}"
 )
 group["LaunchConfigurationName"].should.equal(self.lc_name)
 group["DefaultCooldown"].should.equal(60)

@@ -21,9 +21,7 @@ def test_create_launch_configuration():
 SecurityGroups=["default", "default2"],
 UserData="This is some user_data",
 InstanceMonitoring={"Enabled": True},
-IamInstanceProfile="arn:aws:iam::{}:instance-profile/testing".format(
-ACCOUNT_ID
-),
+IamInstanceProfile=f"arn:aws:iam::{ACCOUNT_ID}:instance-profile/testing",
 SpotPrice="0.1",
 )

@@ -39,7 +37,7 @@ def test_create_launch_configuration():
 userdata.should.equal(b"This is some user_data")
 launch_config["InstanceMonitoring"].should.equal({"Enabled": True})
 launch_config["IamInstanceProfile"].should.equal(
-"arn:aws:iam::{}:instance-profile/testing".format(ACCOUNT_ID)
+f"arn:aws:iam::{ACCOUNT_ID}:instance-profile/testing"
 )
 launch_config["SpotPrice"].should.equal("0.1")
 launch_config["BlockDeviceMappings"].should.equal([])

@@ -56,9 +54,7 @@ def test_create_launch_configuration_with_block_device_mappings():
 SecurityGroups=["default", "default2"],
 UserData="This is some user_data",
 InstanceMonitoring={"Enabled": True},
-IamInstanceProfile="arn:aws:iam::{}:instance-profile/testing".format(
-ACCOUNT_ID
-),
+IamInstanceProfile=f"arn:aws:iam::{ACCOUNT_ID}:instance-profile/testing",
 SpotPrice="0.1",
 BlockDeviceMappings=[
 {"DeviceName": "/dev/xvdb", "VirtualName": "ephemeral0"},

@@ -236,7 +232,7 @@ def test_launch_configuration_describe_paginated():
 conn = boto3.client("autoscaling", region_name="us-east-1")
 for i in range(51):
 conn.create_launch_configuration(
-LaunchConfigurationName="TestLC%d" % i,
+LaunchConfigurationName=f"TestLC{i}",
 ImageId=EXAMPLE_AMI_ID,
 InstanceType="t2.medium",
 )
@ -60,17 +60,13 @@ def test_list_functions():
|
||||
v1 = [f for f in our_functions if f["Version"] == "1"][0]
|
||||
v1["Description"].should.equal("v2")
|
||||
v1["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}:1".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}:1"
|
||||
)
|
||||
|
||||
latest = [f for f in our_functions if f["Version"] == "$LATEST"][0]
|
||||
latest["Description"].should.equal("")
|
||||
latest["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}:$LATEST".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}:$LATEST"
|
||||
)
|
||||
|
||||
|
||||
@ -125,9 +121,7 @@ def test_create_function_from_aws_bucket():
|
||||
|
||||
result.should.have.key("FunctionName").equals(function_name)
|
||||
result.should.have.key("FunctionArn").equals(
|
||||
"arn:aws:lambda:{}:{}:function:{}".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}"
|
||||
)
|
||||
result.should.have.key("Runtime").equals("python2.7")
|
||||
result.should.have.key("Handler").equals("lambda_function.lambda_handler")
|
||||
@ -163,9 +157,7 @@ def test_create_function_from_zipfile():
|
||||
result.should.equal(
|
||||
{
|
||||
"FunctionName": function_name,
|
||||
"FunctionArn": "arn:aws:lambda:{}:{}:function:{}".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
),
|
||||
"FunctionArn": f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}",
|
||||
"Runtime": "python2.7",
|
||||
"Role": result["Role"],
|
||||
"Handler": "lambda_function.lambda_handler",
|
||||
@ -281,7 +273,7 @@ def test_get_function():
|
||||
result["Configuration"].pop("LastModified")
|
||||
|
||||
result["Code"]["Location"].should.equal(
|
||||
"s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com/test.zip".format(_lambda_region)
|
||||
f"s3://awslambda-{_lambda_region}-tasks.s3-{_lambda_region}.amazonaws.com/test.zip"
|
||||
)
|
||||
result["Code"]["RepositoryType"].should.equal("S3")
|
||||
|
||||
@ -309,9 +301,7 @@ def test_get_function():
|
||||
result = conn.get_function(FunctionName=function_name, Qualifier="$LATEST")
|
||||
result["Configuration"]["Version"].should.equal("$LATEST")
|
||||
result["Configuration"]["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:us-west-2:{}:function:{}:$LATEST".format(
|
||||
ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:us-west-2:{ACCOUNT_ID}:function:{function_name}:$LATEST"
|
||||
)
|
||||
|
||||
# Test get function when can't find function name
|
||||
@ -376,9 +366,7 @@ def test_get_function_configuration(key):
|
||||
)
|
||||
result["Version"].should.equal("$LATEST")
|
||||
result["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}:$LATEST".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}:$LATEST"
|
||||
)
|
||||
|
||||
# Test get function when can't find function name
|
||||
@ -591,7 +579,7 @@ def test_publish():
|
||||
|
||||
# #SetComprehension ;-)
|
||||
published_arn = list({f["FunctionArn"] for f in our_functions} - {latest_arn})[0]
|
||||
published_arn.should.contain("{}:1".format(function_name))
|
||||
published_arn.should.contain(f"{function_name}:1")
|
||||
|
||||
conn.delete_function(FunctionName=function_name, Qualifier="1")
|
||||
|
||||
@ -639,9 +627,7 @@ def test_list_create_list_get_delete_list():
|
||||
)
|
||||
expected_function_result = {
|
||||
"Code": {
|
||||
"Location": "s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com/test.zip".format(
|
||||
_lambda_region
|
||||
),
|
||||
"Location": f"s3://awslambda-{_lambda_region}-tasks.s3-{_lambda_region}.amazonaws.com/test.zip",
|
||||
"RepositoryType": "S3",
|
||||
},
|
||||
"Configuration": {
|
||||
@ -673,9 +659,7 @@ def test_list_create_list_get_delete_list():
|
||||
f["FunctionArn"] for f in functions if f["FunctionName"] == function_name
|
||||
][0]
|
||||
func_arn.should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}"
|
||||
)
|
||||
functions = conn.list_functions(FunctionVersion="ALL")["Functions"]
|
||||
our_functions = [f for f in functions if f["FunctionName"] == function_name]
|
||||
@ -683,9 +667,7 @@ def test_list_create_list_get_delete_list():
|
||||
|
||||
latest = [f for f in our_functions if f["Version"] == "$LATEST"][0]
|
||||
latest["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}:$LATEST".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}:$LATEST"
|
||||
)
|
||||
latest.pop("FunctionArn")
|
||||
latest.pop("LastModified")
|
||||
@ -694,17 +676,13 @@ def test_list_create_list_get_delete_list():
|
||||
published = [f for f in our_functions if f["Version"] != "$LATEST"][0]
|
||||
published["Version"].should.equal("1")
|
||||
published["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}:1".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}:1"
|
||||
)
|
||||
|
||||
func = conn.get_function(FunctionName=function_name)
|
||||
|
||||
func["Configuration"]["FunctionArn"].should.equal(
|
||||
"arn:aws:lambda:{}:{}:function:{}".format(
|
||||
_lambda_region, ACCOUNT_ID, function_name
|
||||
)
|
||||
f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:function:{function_name}"
|
||||
)
|
||||
|
||||
# this is hard to match against, so remove it
|
||||
@ -746,7 +724,7 @@ def test_get_function_created_with_zipfile():
|
||||
assert len(response["Code"]) == 2
|
||||
assert response["Code"]["RepositoryType"] == "S3"
|
||||
assert response["Code"]["Location"].startswith(
|
||||
"s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com".format(_lambda_region)
|
||||
f"s3://awslambda-{_lambda_region}-tasks.s3-{_lambda_region}.amazonaws.com"
|
||||
)
|
||||
response.should.have.key("Configuration")
|
||||
config = response["Configuration"]
|
||||
@ -801,17 +779,18 @@ def test_list_versions_by_function():
|
||||
assert res["ResponseMetadata"]["HTTPStatusCode"] == 201
|
||||
versions = conn.list_versions_by_function(FunctionName=function_name)
|
||||
assert len(versions["Versions"]) == 3
|
||||
assert versions["Versions"][0][
|
||||
"FunctionArn"
|
||||
] == "arn:aws:lambda:us-west-2:{}:function:{}:$LATEST".format(
|
||||
ACCOUNT_ID, function_name
|
||||
assert (
|
||||
versions["Versions"][0]["FunctionArn"]
|
||||
== f"arn:aws:lambda:us-west-2:{ACCOUNT_ID}:function:{function_name}:$LATEST"
|
||||
)
|
||||
assert (
|
||||
versions["Versions"][1]["FunctionArn"]
|
||||
== f"arn:aws:lambda:us-west-2:{ACCOUNT_ID}:function:{function_name}:1"
|
||||
)
|
||||
assert (
|
||||
versions["Versions"][2]["FunctionArn"]
|
||||
== f"arn:aws:lambda:us-west-2:{ACCOUNT_ID}:function:{function_name}:2"
|
||||
)
|
||||
assert versions["Versions"][1][
|
||||
"FunctionArn"
|
||||
] == "arn:aws:lambda:us-west-2:{}:function:{}:1".format(ACCOUNT_ID, function_name)
|
||||
assert versions["Versions"][2][
|
||||
"FunctionArn"
|
||||
] == "arn:aws:lambda:us-west-2:{}:function:{}:2".format(ACCOUNT_ID, function_name)
|
||||
|
||||
conn.create_function(
|
||||
FunctionName="testFunction_2",
|
||||
@ -826,10 +805,9 @@ def test_list_versions_by_function():
|
||||
)
|
||||
versions = conn.list_versions_by_function(FunctionName="testFunction_2")
|
||||
assert len(versions["Versions"]) == 1
|
||||
assert versions["Versions"][0][
|
||||
"FunctionArn"
|
||||
] == "arn:aws:lambda:us-west-2:{}:function:testFunction_2:$LATEST".format(
|
||||
ACCOUNT_ID
|
||||
assert (
|
||||
versions["Versions"][0]["FunctionArn"]
|
||||
== f"arn:aws:lambda:us-west-2:{ACCOUNT_ID}:function:testFunction_2:$LATEST"
|
||||
)
|
||||
|
||||
|
||||
@ -980,7 +958,7 @@ def test_update_function_zip(key):
|
||||
assert len(response["Code"]) == 2
|
||||
assert response["Code"]["RepositoryType"] == "S3"
|
||||
assert response["Code"]["Location"].startswith(
|
||||
"s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com".format(_lambda_region)
|
||||
f"s3://awslambda-{_lambda_region}-tasks.s3-{_lambda_region}.amazonaws.com"
|
||||
)
|
||||
|
||||
config = response["Configuration"]
|
||||
@ -1038,7 +1016,7 @@ def test_update_function_s3():
|
||||
assert len(response["Code"]) == 2
|
||||
assert response["Code"]["RepositoryType"] == "S3"
|
||||
assert response["Code"]["Location"].startswith(
|
||||
"s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com".format(_lambda_region)
|
||||
f"s3://awslambda-{_lambda_region}-tasks.s3-{_lambda_region}.amazonaws.com"
|
||||
)
|
||||
|
||||
config = response["Configuration"]
|
||||
|
@ -238,7 +238,7 @@ def test_invoke_function_from_sqs_exception():
|
||||
|
||||
entries = []
|
||||
for i in range(3):
|
||||
body = {"uuid": str(uuid.uuid4()), "test": "test_{}".format(i)}
|
||||
body = {"uuid": str(uuid.uuid4()), "test": f"test_{i}"}
|
||||
entry = {"Id": str(i), "MessageBody": json.dumps(body)}
|
||||
entries.append(entry)
|
||||
|
||||
|
@ -65,9 +65,7 @@ def test_get_lambda_layers():
|
||||
for version in result["LayerVersions"]:
|
||||
version.pop("CreatedDate")
|
||||
result["LayerVersions"].sort(key=lambda x: x["Version"])
|
||||
expected_arn = "arn:aws:lambda:{0}:{1}:layer:{2}:".format(
|
||||
_lambda_region, ACCOUNT_ID, layer_name
|
||||
)
|
||||
expected_arn = f"arn:aws:lambda:{_lambda_region}:{ACCOUNT_ID}:layer:{layer_name}:"
|
||||
result["LayerVersions"].should.equal(
|
||||
[
|
||||
{
|
||||
|
@ -106,15 +106,15 @@ def test_tags_not_found():
|
||||
"""
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
conn.list_tags.when.called_with(
|
||||
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID)
|
||||
Resource=f"arn:aws:lambda:{ACCOUNT_ID}:function:not-found"
|
||||
).should.throw(botocore.client.ClientError)
|
||||
|
||||
conn.tag_resource.when.called_with(
|
||||
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID),
|
||||
Resource=f"arn:aws:lambda:{ACCOUNT_ID}:function:not-found",
|
||||
Tags=dict(spam="eggs"),
|
||||
).should.throw(botocore.client.ClientError)
|
||||
|
||||
conn.untag_resource.when.called_with(
|
||||
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID),
|
||||
Resource=f"arn:aws:lambda:{ACCOUNT_ID}:function:not-found",
|
||||
TagKeys=["spam"],
|
||||
).should.throw(botocore.client.ClientError)
|
||||
|
@ -30,7 +30,12 @@ def lambda_handler(event, context):
|
||||
|
||||
|
||||
def get_test_zip_file2():
|
||||
func_str = """
|
||||
base_url = (
|
||||
"motoserver:5000"
|
||||
if settings.TEST_SERVER_MODE
|
||||
else "ec2.us-west-2.amazonaws.com"
|
||||
)
|
||||
func_str = f"""
|
||||
import boto3
|
||||
|
||||
def lambda_handler(event, context):
|
||||
@ -40,11 +45,7 @@ def lambda_handler(event, context):
|
||||
vol = ec2.Volume(volume_id)
|
||||
|
||||
return {{'id': vol.id, 'state': vol.state, 'size': vol.size}}
|
||||
""".format(
|
||||
base_url="motoserver:5000"
|
||||
if settings.TEST_SERVER_MODE
|
||||
else "ec2.us-west-2.amazonaws.com"
|
||||
)
|
||||
"""
|
||||
return _process_lambda(func_str)
|
||||
|
||||
|
||||
|
@ -385,9 +385,7 @@ def _wait_for_job_statuses(client, job_id, statuses, seconds_to_wait=30):
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Time out waiting for job status {status}!\n Last status: {last_status}".format(
|
||||
status=statuses, last_status=last_job_status
|
||||
)
|
||||
f"Time out waiting for job status {statuses}!\n Last status: {last_job_status}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -18,7 +18,7 @@ def test_register_task_definition(use_resource_reqs):
|
||||
resp.should.contain("revision")
|
||||
|
||||
assert resp["jobDefinitionArn"].endswith(
|
||||
"{0}:{1}".format(resp["jobDefinitionName"], resp["revision"])
|
||||
f"{resp['jobDefinitionName']}:{resp['revision']}"
|
||||
)
|
||||
|
||||
|
||||
@ -113,7 +113,7 @@ def test_reregister_task_definition(use_resource_reqs):
|
||||
resp1.should.contain("revision")
|
||||
|
||||
assert resp1["jobDefinitionArn"].endswith(
|
||||
"{0}:{1}".format(resp1["jobDefinitionName"], resp1["revision"])
|
||||
f"{resp1['jobDefinitionName']}:{resp1['revision']}"
|
||||
)
|
||||
resp1["revision"].should.equal(1)
|
||||
|
||||
|
@ -12,7 +12,7 @@ from .fixtures.custom_lambda import get_template
|
||||
|
||||
|
||||
def get_lambda_code():
|
||||
pfunc = """
|
||||
return f"""
|
||||
def lambda_handler(event, context):
|
||||
# Need to print this, one of the tests verifies the correct input
|
||||
print(event)
|
||||
@ -21,17 +21,14 @@ def lambda_handler(event, context):
|
||||
response["StackId"] = event["StackId"]
|
||||
response["RequestId"] = event["RequestId"]
|
||||
response["LogicalResourceId"] = event["LogicalResourceId"]
|
||||
response["PhysicalResourceId"] = "{resource_id}"
|
||||
response["PhysicalResourceId"] = "CustomResource{str(uuid4())[0:6]}"
|
||||
response_data = dict()
|
||||
response_data["info_value"] = "special value"
|
||||
if event["RequestType"] == "Create":
|
||||
response["Data"] = response_data
|
||||
import cfnresponse
|
||||
cfnresponse.send(event, context, cfnresponse.SUCCESS, response_data)
|
||||
""".format(
|
||||
resource_id=f"CustomResource{str(uuid4())[0:6]}"
|
||||
)
|
||||
return pfunc
|
||||
"""
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
|
@ -112,9 +112,7 @@ def test_create_stack_with_depends_on():
|
||||
|
||||
ecs = boto3.client("ecs", region_name="us-east-1")
|
||||
cluster_arn = ecs.list_clusters()["clusterArns"][0]
|
||||
assert cluster_arn == "arn:aws:ecs:us-east-1:{}:cluster/test-cluster".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
assert cluster_arn == f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test-cluster"
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
|
@ -1075,10 +1075,10 @@ def test_creating_stacks_across_regions():
|
||||
list(west2_cf.stacks.all()).should.have.length_of(1)
|
||||
|
||||
list(west1_cf.stacks.all())[0].stack_id.should.contain(
|
||||
"arn:aws:cloudformation:us-west-1:{}:stack/test_stack/".format(ACCOUNT_ID)
|
||||
f"arn:aws:cloudformation:us-west-1:{ACCOUNT_ID}:stack/test_stack/"
|
||||
)
|
||||
list(west2_cf.stacks.all())[0].stack_id.should.contain(
|
||||
"arn:aws:cloudformation:us-west-2:{}:stack/test_stack/".format(ACCOUNT_ID)
|
||||
f"arn:aws:cloudformation:us-west-2:{ACCOUNT_ID}:stack/test_stack/"
|
||||
)
|
||||
|
||||
|
||||
@ -1110,7 +1110,7 @@ def test_create_stack_with_notification_arn():
|
||||
messages.should.have.length_of(1)
|
||||
msg = json.loads(messages[0].body)
|
||||
msg["Subject"].should.equal("AWS CloudFormation Notification")
|
||||
msg["Message"].should.contain("StackId='{}'\n".format(stack.stack_id))
|
||||
msg["Message"].should.contain(f"StackId='{stack.stack_id}'\n")
|
||||
msg["Message"].should.contain("LogicalResourceId='test_stack_with_notifications'\n")
|
||||
msg["Message"].should.contain("ResourceStatus='CREATE_IN_PROGRESS'\n")
|
||||
msg["Message"].should.contain("ResourceStatusReason='User Initiated'\n")
|
||||
@ -1128,7 +1128,7 @@ def test_create_stack_with_notification_arn():
|
||||
messages = queue.receive_messages()
|
||||
messages.should.have.length_of(1)
|
||||
msg = json.loads(messages[0].body)
|
||||
msg["Message"].should.contain("StackId='{}'\n".format(stack.stack_id))
|
||||
msg["Message"].should.contain(f"StackId='{stack.stack_id}'\n")
|
||||
msg["Message"].should.contain("LogicalResourceId='test_stack_with_notifications'\n")
|
||||
msg["Message"].should.contain("ResourceStatus='CREATE_COMPLETE'\n")
|
||||
msg["Message"].should.contain("ResourceStatusReason='None'\n")
|
||||
@ -1150,10 +1150,10 @@ def test_create_stack_with_role_arn():
|
||||
cf.create_stack(
|
||||
StackName="test_stack_with_notifications",
|
||||
TemplateBody=dummy_template_json,
|
||||
RoleARN="arn:aws:iam::{}:role/moto".format(ACCOUNT_ID),
|
||||
RoleARN=f"arn:aws:iam::{ACCOUNT_ID}:role/moto",
|
||||
)
|
||||
stack = list(cf.stacks.all())[0]
|
||||
stack.role_arn.should.equal("arn:aws:iam::{}:role/moto".format(ACCOUNT_ID))
|
||||
stack.role_arn.should.equal(f"arn:aws:iam::{ACCOUNT_ID}:role/moto")
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
@ -1380,11 +1380,11 @@ def test_create_change_set_from_s3_url():
|
||||
Tags=[{"Key": "tag-key", "Value": "tag-value"}],
|
||||
)
|
||||
assert (
|
||||
"arn:aws:cloudformation:us-west-1:{}:changeSet/NewChangeSet/".format(ACCOUNT_ID)
|
||||
f"arn:aws:cloudformation:us-west-1:{ACCOUNT_ID}:changeSet/NewChangeSet/"
|
||||
in response["Id"]
|
||||
)
|
||||
assert (
|
||||
"arn:aws:cloudformation:us-west-1:{}:stack/NewStack".format(ACCOUNT_ID)
|
||||
f"arn:aws:cloudformation:us-west-1:{ACCOUNT_ID}:stack/NewStack"
|
||||
in response["StackId"]
|
||||
)
|
||||
|
||||
@ -1503,9 +1503,7 @@ def test_execute_change_set_w_name():
|
||||
def test_describe_stack_pagination():
|
||||
conn = boto3.client("cloudformation", region_name="us-east-1")
|
||||
for i in range(100):
|
||||
conn.create_stack(
|
||||
StackName="test_stack_{}".format(i), TemplateBody=dummy_template_json
|
||||
)
|
||||
conn.create_stack(StackName=f"test_stack_{i}", TemplateBody=dummy_template_json)
|
||||
|
||||
resp = conn.describe_stacks()
|
||||
stacks = resp["Stacks"]
|
||||
@ -1789,7 +1787,7 @@ def test_describe_updated_stack():
|
||||
|
||||
cf_conn.update_stack(
|
||||
StackName="test_stack",
|
||||
RoleARN="arn:aws:iam::{}:role/moto".format(ACCOUNT_ID),
|
||||
RoleARN=f"arn:aws:iam::{ACCOUNT_ID}:role/moto",
|
||||
TemplateBody=dummy_update_template_json,
|
||||
Tags=[{"Key": "foo", "Value": "baz"}],
|
||||
Parameters=[{"ParameterKey": "KeyName", "ParameterValue": "value"}],
|
||||
@ -1801,7 +1799,7 @@ def test_describe_updated_stack():
|
||||
stack_by_id["StackId"].should.equal(stack["StackId"])
|
||||
stack_by_id["StackName"].should.equal("test_stack")
|
||||
stack_by_id["StackStatus"].should.equal("UPDATE_COMPLETE")
|
||||
stack_by_id["RoleARN"].should.equal("arn:aws:iam::{}:role/moto".format(ACCOUNT_ID))
|
||||
stack_by_id["RoleARN"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:role/moto")
|
||||
stack_by_id["Tags"].should.equal([{"Key": "foo", "Value": "baz"}])
|
||||
|
||||
# Verify the updated template is persisted
|
||||
@ -1937,9 +1935,7 @@ def test_update_stack_when_rolled_back():
|
||||
err = ex.value.response["Error"]
|
||||
err.should.have.key("Code").being.equal("ValidationError")
|
||||
err.should.have.key("Message").match(
|
||||
r"Stack:arn:aws:cloudformation:us-east-1:{}:stack/test_stack/[a-z0-9-]+ is in ROLLBACK_COMPLETE state and can not be updated.".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
rf"Stack:arn:aws:cloudformation:us-east-1:{ACCOUNT_ID}:stack/test_stack/[a-z0-9-]+ is in ROLLBACK_COMPLETE state and can not be updated."
|
||||
)
|
||||
|
||||
|
||||
@ -2121,7 +2117,7 @@ def test_list_exports_with_token():
|
||||
# Add index to ensure name is unique
|
||||
dummy_output_template["Outputs"]["StackVPC"]["Export"]["Name"] += str(i)
|
||||
cf.create_stack(
|
||||
StackName="test_stack_{}".format(i),
|
||||
StackName=f"test_stack_{i}",
|
||||
TemplateBody=json.dumps(dummy_output_template),
|
||||
)
|
||||
exports = cf.list_exports()
|
||||
|
@ -52,7 +52,7 @@ def test_fn_join_boto3():
|
||||
|
||||
stack = cf.describe_stacks()["Stacks"][0]
|
||||
fn_join_output = stack["Outputs"][0]
|
||||
fn_join_output["OutputValue"].should.equal("test eip:{0}".format(eip["PublicIp"]))
|
||||
fn_join_output["OutputValue"].should.equal(f"test eip:{eip['PublicIp']}")
|
||||
|
||||
|
||||
@mock_cloudformation
|
||||
@ -301,9 +301,7 @@ def lambda_handler(event, context):
|
||||
[
|
||||
{
|
||||
"Version": 1,
|
||||
"LayerVersionArn": "arn:aws:lambda:{}:{}:layer:{}:1".format(
|
||||
region, ACCOUNT_ID, layer_name
|
||||
),
|
||||
"LayerVersionArn": f"arn:aws:lambda:{region}:{ACCOUNT_ID}:layer:{layer_name}:1",
|
||||
"CompatibleRuntimes": ["python2.7", "python3.6"],
|
||||
"Description": "Test Layer",
|
||||
"LicenseInfo": "MIT",
|
||||
@ -424,7 +422,7 @@ def test_stack_spot_fleet():
|
||||
"Type": "AWS::EC2::SpotFleet",
|
||||
"Properties": {
|
||||
"SpotFleetRequestConfigData": {
|
||||
"IamFleetRole": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID),
|
||||
"IamFleetRole": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet",
|
||||
"SpotPrice": "0.12",
|
||||
"TargetCapacity": 6,
|
||||
"AllocationStrategy": "diversified",
|
||||
@ -445,9 +443,7 @@ def test_stack_spot_fleet():
|
||||
"SecurityGroups": [{"GroupId": "sg-123"}],
|
||||
"SubnetId": subnet_id,
|
||||
"IamInstanceProfile": {
|
||||
"Arn": "arn:aws:iam::{}:role/fleet".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
"Arn": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"
|
||||
},
|
||||
"WeightedCapacity": "4",
|
||||
"SpotPrice": "10.00",
|
||||
@ -480,7 +476,7 @@ def test_stack_spot_fleet():
|
||||
spot_fleet_config["SpotPrice"].should.equal("0.12")
|
||||
spot_fleet_config["TargetCapacity"].should.equal(6)
|
||||
spot_fleet_config["IamFleetRole"].should.equal(
|
||||
"arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"
|
||||
)
|
||||
spot_fleet_config["AllocationStrategy"].should.equal("diversified")
|
||||
spot_fleet_config["FulfilledCapacity"].should.equal(6.0)
|
||||
@ -513,7 +509,7 @@ def test_stack_spot_fleet_should_figure_out_default_price():
|
||||
"Type": "AWS::EC2::SpotFleet",
|
||||
"Properties": {
|
||||
"SpotFleetRequestConfigData": {
|
||||
"IamFleetRole": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID),
|
||||
"IamFleetRole": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet",
|
||||
"TargetCapacity": 6,
|
||||
"AllocationStrategy": "diversified",
|
||||
"LaunchSpecifications": [
|
||||
@ -532,9 +528,7 @@ def test_stack_spot_fleet_should_figure_out_default_price():
|
||||
"SecurityGroups": [{"GroupId": "sg-123"}],
|
||||
"SubnetId": subnet_id,
|
||||
"IamInstanceProfile": {
|
||||
"Arn": "arn:aws:iam::{}:role/fleet".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
"Arn": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"
|
||||
},
|
||||
"WeightedCapacity": "4",
|
||||
},
|
||||
|
@ -19,8 +19,8 @@ def test_create_alarm():
|
||||
Dimensions=[{"Name": "InstanceId", "Value": "i-0123457"}],
|
||||
EvaluationPeriods=5,
|
||||
InsufficientDataActions=["arn:insufficient"],
|
||||
Namespace="{0}_namespace".format(name),
|
||||
MetricName="{0}_metric".format(name),
|
||||
Namespace=f"{name}_namespace",
|
||||
MetricName=f"{name}_metric",
|
||||
OKActions=["arn:ok"],
|
||||
Period=60,
|
||||
Statistic="Average",
|
||||
@ -48,7 +48,7 @@ def test_create_alarm():
|
||||
alarm.should.have.key("InsufficientDataActions").equal(["arn:insufficient"])
|
||||
alarm.should.have.key("Unit").equal("Seconds")
|
||||
alarm.should.have.key("AlarmArn").equal(
|
||||
"arn:aws:cloudwatch:{}:{}:alarm:{}".format(region, ACCOUNT_ID, name)
|
||||
f"arn:aws:cloudwatch:{region}:{ACCOUNT_ID}:alarm:{name}"
|
||||
)
|
||||
# default value should be True
|
||||
alarm.should.have.key("ActionsEnabled").equal(True)
|
||||
@ -70,8 +70,8 @@ def test_delete_alarm():
|
||||
Dimensions=[{"Name": "InstanceId", "Value": "i-0123457"}],
|
||||
EvaluationPeriods=5,
|
||||
InsufficientDataActions=["arn:insufficient"],
|
||||
Namespace="{0}_namespace".format(name),
|
||||
MetricName="{0}_metric".format(name),
|
||||
Namespace=f"{name}_namespace",
|
||||
MetricName=f"{name}_metric",
|
||||
OKActions=["arn:ok"],
|
||||
Period=60,
|
||||
Statistic="Average",
|
||||
|
@ -27,9 +27,7 @@ def test_codebuild_create_project_s3_artifacts():
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
response = client.create_project(
|
||||
@ -78,9 +76,7 @@ def test_codebuild_create_project_no_artifacts():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
response = client.create_project(
|
||||
@ -127,9 +123,7 @@ def test_codebuild_create_project_with_invalid_name():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
with pytest.raises(client.exceptions.from_code("InvalidInputException")) as err:
|
||||
@ -160,9 +154,7 @@ def test_codebuild_create_project_with_invalid_name_length():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
with pytest.raises(client.exceptions.from_code("InvalidInputException")) as err:
|
||||
@ -192,9 +184,7 @@ def test_codebuild_create_project_when_exists():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -234,9 +224,7 @@ def test_codebuild_list_projects():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -275,9 +263,7 @@ def test_codebuild_list_builds_for_project_no_history():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -309,9 +295,7 @@ def test_codebuild_list_builds_for_project_with_history():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -392,9 +376,7 @@ def test_codebuild_start_build_no_overrides():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -426,9 +408,7 @@ def test_codebuild_start_build_multiple_times():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -463,9 +443,7 @@ def test_codebuild_start_build_with_overrides():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
branch_override = "fix/testing"
|
||||
@ -504,9 +482,7 @@ def test_codebuild_batch_get_builds_1_project():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -543,9 +519,7 @@ def test_codebuild_batch_get_builds_2_projects():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -584,7 +558,7 @@ def test_codebuild_batch_get_builds_invalid_build_id():
|
||||
client = boto3.client("codebuild", region_name="eu-central-1")
|
||||
|
||||
with pytest.raises(client.exceptions.InvalidInputException) as err:
|
||||
client.batch_get_builds(ids=["some_project{}".format(uuid1())])
|
||||
client.batch_get_builds(ids=[f"some_project{uuid1()}"])
|
||||
err.value.response["Error"]["Code"].should.equal("InvalidInputException")
|
||||
|
||||
|
||||
@ -613,9 +587,7 @@ def test_codebuild_delete_project():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -653,9 +625,7 @@ def test_codebuild_stop_build():
|
||||
environment["image"] = "contents_not_validated"
|
||||
environment["computeType"] = "BUILD_GENERAL1_SMALL"
|
||||
service_role = (
|
||||
"arn:aws:iam::{0}:role/service-role/my-codebuild-service-role".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/service-role/my-codebuild-service-role"
|
||||
)
|
||||
|
||||
client.create_project(
|
||||
@ -678,7 +648,7 @@ def test_codebuild_stop_build_no_build():
|
||||
client = boto3.client("codebuild", region_name="eu-central-1")
|
||||
|
||||
with pytest.raises(client.exceptions.ResourceNotFoundException) as err:
|
||||
client.stop_build(id="some_project:{0}".format(uuid1()))
|
||||
client.stop_build(id=f"some_project:{uuid1()}")
|
||||
err.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
|
||||
|
||||
|
||||
@ -687,5 +657,5 @@ def test_codebuild_stop_build_bad_uid():
|
||||
client = boto3.client("codebuild", region_name="eu-central-1")
|
||||
|
||||
with pytest.raises(client.exceptions.InvalidInputException) as err:
|
||||
client.stop_build(id="some_project{0}".format(uuid1()))
|
||||
client.stop_build(id=f"some_project{uuid1()}")
|
||||
err.value.response["Error"]["Code"].should.equal("InvalidInputException")
|
||||
|
@ -24,19 +24,13 @@ def test_create_repository():
|
||||
"description repo one"
|
||||
)
|
||||
response["repositoryMetadata"]["cloneUrlSsh"].should.equal(
|
||||
"ssh://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_one"
|
||||
)
|
||||
"ssh://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_one"
|
||||
)
|
||||
response["repositoryMetadata"]["cloneUrlHttp"].should.equal(
|
||||
"https://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_one"
|
||||
)
|
||||
"https://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_one"
|
||||
)
|
||||
response["repositoryMetadata"]["Arn"].should.equal(
|
||||
"arn:aws:codecommit:{0}:{1}:{2}".format(
|
||||
"eu-central-1", ACCOUNT_ID, "repository_one"
|
||||
)
|
||||
f"arn:aws:codecommit:eu-central-1:{ACCOUNT_ID}:repository_one"
|
||||
)
|
||||
response["repositoryMetadata"]["accountId"].should.equal(ACCOUNT_ID)
|
||||
|
||||
@ -58,19 +52,13 @@ def test_create_repository_without_description():
|
||||
response["repositoryMetadata"]["lastModifiedDate"].should_not.be.none
|
||||
response["repositoryMetadata"]["repositoryId"].should_not.be.empty
|
||||
response["repositoryMetadata"]["cloneUrlSsh"].should.equal(
|
||||
"ssh://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_two"
|
||||
)
|
||||
"ssh://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_two"
|
||||
)
|
||||
response["repositoryMetadata"]["cloneUrlHttp"].should.equal(
|
||||
"https://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_two"
|
||||
)
|
||||
"https://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_two"
|
||||
)
|
||||
response["repositoryMetadata"]["Arn"].should.equal(
|
||||
"arn:aws:codecommit:{0}:{1}:{2}".format(
|
||||
"eu-central-1", ACCOUNT_ID, "repository_two"
|
||||
)
|
||||
f"arn:aws:codecommit:eu-central-1:{ACCOUNT_ID}:repository_two"
|
||||
)
|
||||
response["repositoryMetadata"]["accountId"].should.equal(ACCOUNT_ID)
|
||||
|
||||
@ -91,7 +79,7 @@ def test_create_repository_repository_name_exists():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("RepositoryNameExistsException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Repository named {0} already exists".format("repository_two")
|
||||
"Repository named repository_two already exists"
|
||||
)
|
||||
|
||||
|
||||
@ -138,19 +126,13 @@ def test_get_repository():
|
||||
"description repo one"
|
||||
)
|
||||
response.get("repositoryMetadata").get("cloneUrlSsh").should.equal(
|
||||
"ssh://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_one"
|
||||
)
|
||||
"ssh://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_one"
|
||||
)
|
||||
response.get("repositoryMetadata").get("cloneUrlHttp").should.equal(
|
||||
"https://git-codecommit.{0}.amazonaws.com/v1/repos/{1}".format(
|
||||
"eu-central-1", "repository_one"
|
||||
)
|
||||
"https://git-codecommit.eu-central-1.amazonaws.com/v1/repos/repository_one"
|
||||
)
|
||||
response.get("repositoryMetadata").get("Arn").should.equal(
|
||||
"arn:aws:codecommit:{0}:{1}:{2}".format(
|
||||
"eu-central-1", ACCOUNT_ID, "repository_one"
|
||||
)
|
||||
f"arn:aws:codecommit:eu-central-1:{ACCOUNT_ID}:repository_one"
|
||||
)
|
||||
response.get("repositoryMetadata").get("accountId").should.equal(ACCOUNT_ID)
|
||||
|
||||
@ -162,9 +144,7 @@ def test_get_repository():
|
||||
ex.operation_name.should.equal("GetRepository")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("RepositoryDoesNotExistException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"{0} does not exist".format(repository_name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"{repository_name} does not exist")
|
||||
|
||||
|
||||
@mock_codecommit
|
||||
|
@ -459,7 +459,7 @@ def test_list_tags_for_resource():
|
||||
create_basic_codepipeline(client, name)
|
||||
|
||||
response = client.list_tags_for_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name)
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}"
|
||||
)
|
||||
response["tags"].should.equal([{"key": "key", "value": "value"}])
|
||||
|
||||
@ -488,12 +488,12 @@ def test_tag_resource():
|
||||
create_basic_codepipeline(client, name)
|
||||
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}",
|
||||
tags=[{"key": "key-2", "value": "value-2"}],
|
||||
)
|
||||
|
||||
response = client.list_tags_for_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name)
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}"
|
||||
)
|
||||
response["tags"].should.equal(
|
||||
[{"key": "key", "value": "value"}, {"key": "key-2", "value": "value-2"}]
|
||||
@ -521,7 +521,7 @@ def test_tag_resource_errors():
|
||||
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}",
|
||||
tags=[{"key": "aws:key", "value": "value"}],
|
||||
)
|
||||
ex = e.value
|
||||
@ -536,20 +536,15 @@ def test_tag_resource_errors():
|
||||
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
tags=[
|
||||
{"key": "key-{}".format(i), "value": "value-{}".format(i)}
|
||||
for i in range(50)
|
||||
],
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}",
|
||||
tags=[{"key": f"key-{i}", "value": f"value-{i}"} for i in range(50)],
|
||||
)
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("TagResource")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("TooManyTagsException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Tag limit exceeded for resource [arn:aws:codepipeline:us-east-1:123456789012:{}].".format(
|
||||
name
|
||||
)
|
||||
f"Tag limit exceeded for resource [arn:aws:codepipeline:us-east-1:123456789012:{name}]."
|
||||
)
|
||||
|
||||
|
||||
@ -560,23 +555,23 @@ def test_untag_resource():
|
||||
create_basic_codepipeline(client, name)
|
||||
|
||||
response = client.list_tags_for_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name)
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}"
|
||||
)
|
||||
response["tags"].should.equal([{"key": "key", "value": "value"}])
|
||||
|
||||
client.untag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}",
|
||||
tagKeys=["key"],
|
||||
)
|
||||
|
||||
response = client.list_tags_for_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name)
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}"
|
||||
)
|
||||
response["tags"].should.have.length_of(0)
|
||||
|
||||
# removing a not existing tag should raise no exception
|
||||
client.untag_resource(
|
||||
resourceArn="arn:aws:codepipeline:us-east-1:123456789012:{}".format(name),
|
||||
resourceArn=f"arn:aws:codepipeline:us-east-1:123456789012:{name}",
|
||||
tagKeys=["key"],
|
||||
)
|
||||
|
||||
|
@ -44,9 +44,7 @@ def test_create_identity_pool():
AllowUnauthenticatedIdentities=False,
SupportedLoginProviders={"graph.facebook.com": "123456789012345"},
DeveloperProviderName="devname",
OpenIdConnectProviderARNs=[
"arn:aws:rds:eu-west-2:{}:db:mysql-db".format(ACCOUNT_ID)
],
OpenIdConnectProviderARNs=[f"arn:aws:rds:eu-west-2:{ACCOUNT_ID}:db:mysql-db"],
CognitoIdentityProviders=[
{
"ProviderName": "testprovider",
@ -54,7 +52,7 @@ def test_create_identity_pool():
"ServerSideTokenCheck": True,
}
],
SamlProviderARNs=["arn:aws:rds:eu-west-2:{}:db:mysql-db".format(ACCOUNT_ID)],
SamlProviderARNs=[f"arn:aws:rds:eu-west-2:{ACCOUNT_ID}:db:mysql-db"],
)
assert result["IdentityPoolId"] != ""

@ -68,9 +66,7 @@ def test_describe_identity_pool():
AllowUnauthenticatedIdentities=False,
SupportedLoginProviders={"graph.facebook.com": "123456789012345"},
DeveloperProviderName="devname",
OpenIdConnectProviderARNs=[
"arn:aws:rds:eu-west-2:{}:db:mysql-db".format(ACCOUNT_ID)
],
OpenIdConnectProviderARNs=[f"arn:aws:rds:eu-west-2:{ACCOUNT_ID}:db:mysql-db"],
CognitoIdentityProviders=[
{
"ProviderName": "testprovider",
@ -78,7 +74,7 @@ def test_describe_identity_pool():
"ServerSideTokenCheck": True,
}
],
SamlProviderARNs=["arn:aws:rds:eu-west-2:{}:db:mysql-db".format(ACCOUNT_ID)],
SamlProviderARNs=[f"arn:aws:rds:eu-west-2:{ACCOUNT_ID}:db:mysql-db"],
)

result = conn.describe_identity_pool(IdentityPoolId=res["IdentityPoolId"])
@ -35,9 +35,7 @@ def test_create_user_pool():

result["UserPool"]["Id"].should.match(r"[\w-]+_[0-9a-zA-Z]+")
result["UserPool"]["Arn"].should.equal(
"arn:aws:cognito-idp:us-west-2:{}:userpool/{}".format(
ACCOUNT_ID, result["UserPool"]["Id"]
)
f"arn:aws:cognito-idp:us-west-2:{ACCOUNT_ID}:userpool/{result['UserPool']['Id']}"
)
result["UserPool"]["Name"].should.equal(name)
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
@ -847,9 +845,7 @@ def test_create_user_pool_domain_custom_domain_config():

domain = str(uuid.uuid4())
custom_domain_config = {
"CertificateArn": "arn:aws:acm:us-east-1:{}:certificate/123456789012".format(
ACCOUNT_ID
)
"CertificateArn": f"arn:aws:acm:us-east-1:{ACCOUNT_ID}:certificate/123456789012"
}
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
result = conn.create_user_pool_domain(
@ -896,9 +892,7 @@ def test_update_user_pool_domain():

domain = str(uuid.uuid4())
custom_domain_config = {
"CertificateArn": "arn:aws:acm:us-east-1:{}:certificate/123456789012".format(
ACCOUNT_ID
)
"CertificateArn": f"arn:aws:acm:us-east-1:{ACCOUNT_ID}:certificate/123456789012"
}
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
conn.create_user_pool_domain(UserPoolId=user_pool_id, Domain=domain)
@ -2395,7 +2389,7 @@ def test_list_users_inherent_attributes():

for name, filter_value, response_field, response_field_expected_value in filters:
result = conn.list_users(
UserPoolId=user_pool_id, Filter='{}="{}"'.format(name, filter_value)
UserPoolId=user_pool_id, Filter=f'{name}="{filter_value}"'
)
result["Users"].should.have.length_of(1)
result["Users"][0][response_field].should.equal(response_field_expected_value)
@ -2839,8 +2833,8 @@ def test_token_legitimacy():
access_token = outputs["access_token"]
client_id = outputs["client_id"]
username = outputs["username"]
issuer = "https://cognito-idp.us-west-2.amazonaws.com/{}".format(
outputs["user_pool_id"]
issuer = (
f"https://cognito-idp.us-west-2.amazonaws.com/{outputs['user_pool_id']}"
)
id_claims = json.loads(jws.verify(id_token, json_web_key, "RS256"))
id_claims["iss"].should.equal(issuer)
@ -3451,7 +3445,7 @@ def test_resource_server():
ex.value.operation_name.should.equal("CreateResourceServer")
ex.value.response["Error"]["Code"].should.equal("InvalidParameterException")
ex.value.response["Error"]["Message"].should.equal(
"%s already exists in user pool %s." % (identifier, user_pool_id)
f"{identifier} already exists in user pool {user_pool_id}."
)
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)

@ -4308,8 +4302,6 @@ def verify_kid_header(token):


def fetch_public_keys():
keys_url = "https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json".format(
"us-west-2", "someuserpoolid"
)
keys_url = "https://cognito-idp.us-west-2.amazonaws.com/someuserpoolid/.well-known/jwks.json"
response = requests.get(keys_url).json()
return response["keys"]
@ -289,9 +289,7 @@ def test_put_configuration_aggregator():
AccountAggregationSources=[
{"AccountIds": ["012345678910"], "AllAwsRegions": True}
],
Tags=[
{"Key": "{}".format(x), "Value": "{}".format(x)} for x in range(0, 51)
],
Tags=[{"Key": f"{x}", "Value": f"{x}"} for x in range(0, 51)],
)
assert (
"Member must have length less than or equal to 50"
@ -399,9 +397,7 @@ def test_put_configuration_aggregator():
account_aggregation_source
]
assert (
"arn:aws:config:us-west-2:{}:config-aggregator/config-aggregator-".format(
ACCOUNT_ID
)
f"arn:aws:config:us-west-2:{ACCOUNT_ID}:config-aggregator/config-aggregator-"
in result["ConfigurationAggregator"]["ConfigurationAggregatorArn"]
)
assert (
@ -455,7 +451,7 @@ def test_describe_configuration_aggregators():
# Make 10 config aggregators:
for x in range(0, 10):
client.put_configuration_aggregator(
ConfigurationAggregatorName="testing{}".format(x),
ConfigurationAggregatorName=f"testing{x}",
AccountAggregationSources=[
{"AccountIds": ["012345678910"], "AllAwsRegions": True}
],
@ -509,19 +505,19 @@ def test_describe_configuration_aggregators():
assert result["NextToken"] == "testing4"
assert [
agg["ConfigurationAggregatorName"] for agg in result["ConfigurationAggregators"]
] == ["testing{}".format(x) for x in range(0, 4)]
] == [f"testing{x}" for x in range(0, 4)]
result = client.describe_configuration_aggregators(Limit=4, NextToken="testing4")
assert len(result["ConfigurationAggregators"]) == 4
assert result["NextToken"] == "testing8"
assert [
agg["ConfigurationAggregatorName"] for agg in result["ConfigurationAggregators"]
] == ["testing{}".format(x) for x in range(4, 8)]
] == [f"testing{x}" for x in range(4, 8)]
result = client.describe_configuration_aggregators(Limit=4, NextToken="testing8")
assert len(result["ConfigurationAggregators"]) == 2
assert not result.get("NextToken")
assert [
agg["ConfigurationAggregatorName"] for agg in result["ConfigurationAggregators"]
] == ["testing{}".format(x) for x in range(8, 10)]
] == [f"testing{x}" for x in range(8, 10)]

# Test Pagination with Filtering:
result = client.describe_configuration_aggregators(
@ -560,9 +556,7 @@ def test_put_aggregation_authorization():
client.put_aggregation_authorization(
AuthorizedAccountId="012345678910",
AuthorizedAwsRegion="us-west-2",
Tags=[
{"Key": "{}".format(x), "Value": "{}".format(x)} for x in range(0, 51)
],
Tags=[{"Key": f"{x}", "Value": f"{x}"} for x in range(0, 51)],
)
assert (
"Member must have length less than or equal to 50"
@ -626,10 +620,9 @@ def test_put_aggregation_authorization():
Tags=[{"Key": "tag", "Value": "a"}],
)

assert result["AggregationAuthorization"][
"AggregationAuthorizationArn"
] == "arn:aws:config:us-west-2:{}:aggregation-authorization/012345678910/us-east-1".format(
ACCOUNT_ID
assert (
result["AggregationAuthorization"]["AggregationAuthorizationArn"]
== f"arn:aws:config:us-west-2:{ACCOUNT_ID}:aggregation-authorization/012345678910/us-east-1"
)
assert result["AggregationAuthorization"]["AuthorizedAccountId"] == "012345678910"
assert result["AggregationAuthorization"]["AuthorizedAwsRegion"] == "us-east-1"
@ -641,10 +634,9 @@ def test_put_aggregation_authorization():
result = client.put_aggregation_authorization(
AuthorizedAccountId="012345678910", AuthorizedAwsRegion="us-east-1"
)
assert result["AggregationAuthorization"][
"AggregationAuthorizationArn"
] == "arn:aws:config:us-west-2:{}:aggregation-authorization/012345678910/us-east-1".format(
ACCOUNT_ID
assert (
result["AggregationAuthorization"]["AggregationAuthorizationArn"]
== f"arn:aws:config:us-west-2:{ACCOUNT_ID}:aggregation-authorization/012345678910/us-east-1"
)
assert result["AggregationAuthorization"]["AuthorizedAccountId"] == "012345678910"
assert result["AggregationAuthorization"]["AuthorizedAwsRegion"] == "us-east-1"
@ -661,7 +653,7 @@ def test_describe_aggregation_authorizations():
# Make 10 account authorizations:
for i in range(0, 10):
client.put_aggregation_authorization(
AuthorizedAccountId="{}".format(str(i) * 12),
AuthorizedAccountId=f"{str(i) * 12}",
AuthorizedAwsRegion="us-west-2",
)

@ -679,7 +671,7 @@ def test_describe_aggregation_authorizations():
assert result["NextToken"] == ("4" * 12) + "/us-west-2"
assert [
auth["AuthorizedAccountId"] for auth in result["AggregationAuthorizations"]
] == ["{}".format(str(x) * 12) for x in range(0, 4)]
] == [f"{str(x) * 12}" for x in range(0, 4)]

result = client.describe_aggregation_authorizations(
Limit=4, NextToken=("4" * 12) + "/us-west-2"
@ -688,7 +680,7 @@ def test_describe_aggregation_authorizations():
assert result["NextToken"] == ("8" * 12) + "/us-west-2"
assert [
auth["AuthorizedAccountId"] for auth in result["AggregationAuthorizations"]
] == ["{}".format(str(x) * 12) for x in range(4, 8)]
] == [f"{str(x) * 12}" for x in range(4, 8)]

result = client.describe_aggregation_authorizations(
Limit=4, NextToken=("8" * 12) + "/us-west-2"
@ -697,7 +689,7 @@ def test_describe_aggregation_authorizations():
assert not result.get("NextToken")
assert [
auth["AuthorizedAccountId"] for auth in result["AggregationAuthorizations"]
] == ["{}".format(str(x) * 12) for x in range(8, 10)]
] == [f"{str(x) * 12}" for x in range(8, 10)]

# Test with an invalid filter:
with pytest.raises(ClientError) as ce:
@ -1252,7 +1244,7 @@ def test_list_discovered_resource():
s3_client = boto3.client("s3", region_name="us-west-2")
for x in range(0, 10):
s3_client.create_bucket(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)

@ -1269,8 +1261,8 @@ def test_list_discovered_resource():
for x in range(0, 10):
assert result["resourceIdentifiers"][x] == {
"resourceType": "AWS::S3::Bucket",
"resourceId": "bucket{}".format(x),
"resourceName": "bucket{}".format(x),
"resourceId": f"bucket{x}",
"resourceName": f"bucket{x}",
}
assert not result.get("nextToken")

@ -1330,7 +1322,7 @@ def test_list_discovered_resource():
)

# More than 20 resourceIds:
resource_ids = ["{}".format(x) for x in range(0, 21)]
resource_ids = [f"{x}" for x in range(0, 21)]
with pytest.raises(ClientError) as ce:
client.list_discovered_resources(
resourceType="AWS::S3::Bucket", resourceIds=resource_ids
@ -1378,14 +1370,14 @@ def test_list_aggregate_discovered_resource():
s3_client = boto3.client("s3", region_name="us-west-2")
for x in range(0, 10):
s3_client.create_bucket(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)

s3_client_eu = boto3.client("s3", region_name="eu-west-1")
for x in range(10, 12):
s3_client_eu.create_bucket(
Bucket="eu-bucket{}".format(x),
Bucket=f"eu-bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)

@ -1398,16 +1390,16 @@ def test_list_aggregate_discovered_resource():
assert result["ResourceIdentifiers"][x] == {
"SourceAccountId": ACCOUNT_ID,
"ResourceType": "AWS::S3::Bucket",
"ResourceId": "bucket{}".format(x),
"ResourceName": "bucket{}".format(x),
"ResourceId": f"bucket{x}",
"ResourceName": f"bucket{x}",
"SourceRegion": "us-west-2",
}
for x in range(11, 12):
assert result["ResourceIdentifiers"][x] == {
"SourceAccountId": ACCOUNT_ID,
"ResourceType": "AWS::S3::Bucket",
"ResourceId": "eu-bucket{}".format(x),
"ResourceName": "eu-bucket{}".format(x),
"ResourceId": f"eu-bucket{x}",
"ResourceName": f"eu-bucket{x}",
"SourceRegion": "eu-west-1",
}

@ -1519,7 +1511,7 @@ def test_get_resource_config_history():
s3_client = boto3.client("s3", region_name="us-west-2")
for x in range(0, 10):
s3_client.create_bucket(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)
@ -1580,18 +1572,18 @@ def test_batch_get_resource_config():
s3_client = boto3.client("s3", region_name="us-west-2")
for x in range(0, 10):
s3_client.create_bucket(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)

# Get them all:
keys = [
{"resourceType": "AWS::S3::Bucket", "resourceId": "bucket{}".format(x)}
{"resourceType": "AWS::S3::Bucket", "resourceId": f"bucket{x}"}
for x in range(0, 10)
]
result = client.batch_get_resource_config(resourceKeys=keys)
assert len(result["baseConfigurationItems"]) == 10
buckets_missing = ["bucket{}".format(x) for x in range(0, 10)]
buckets_missing = [f"bucket{x}" for x in range(0, 10)]
for r in result["baseConfigurationItems"]:
buckets_missing.remove(r["resourceName"])

@ -1656,22 +1648,22 @@ def test_batch_get_aggregate_resource_config():
s3_client = boto3.client("s3", region_name="us-west-2")
for x in range(0, 10):
s3_client.create_bucket(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)
s3_client.put_bucket_tagging(
Bucket="bucket{}".format(x),
Bucket=f"bucket{x}",
Tagging={"TagSet": [{"Key": "Some", "Value": "Tag"}]},
)

s3_client_eu = boto3.client("s3", region_name="eu-west-1")
for x in range(10, 12):
s3_client_eu.create_bucket(
Bucket="eu-bucket{}".format(x),
Bucket=f"eu-bucket{x}",
CreateBucketConfiguration={"LocationConstraint": "eu-west-1"},
)
s3_client.put_bucket_tagging(
Bucket="eu-bucket{}".format(x),
Bucket=f"eu-bucket{x}",
Tagging={"TagSet": [{"Key": "Some", "Value": "Tag"}]},
)

@ -1681,7 +1673,7 @@ def test_batch_get_aggregate_resource_config():
"SourceAccountId": ACCOUNT_ID,
"SourceRegion": "us-west-2",
"ResourceType": "AWS::S3::Bucket",
"ResourceId": "bucket{}".format(x),
"ResourceId": f"bucket{x}",
}
for x in range(0, 10)
]
@ -1690,7 +1682,7 @@ def test_batch_get_aggregate_resource_config():
"SourceAccountId": ACCOUNT_ID,
"SourceRegion": "eu-west-1",
"ResourceType": "AWS::S3::Bucket",
"ResourceId": "eu-bucket{}".format(x),
"ResourceId": f"eu-bucket{x}",
}
for x in range(10, 12)
]
@ -1704,8 +1696,8 @@ def test_batch_get_aggregate_resource_config():

# Verify all the buckets are there:
assert len(result["BaseConfigurationItems"]) == 12
missing_buckets = ["bucket{}".format(x) for x in range(0, 10)] + [
"eu-bucket{}".format(x) for x in range(10, 12)
missing_buckets = [f"bucket{x}" for x in range(0, 10)] + [
f"eu-bucket{x}" for x in range(10, 12)
]

for r in result["BaseConfigurationItems"]:
@ -2063,7 +2055,7 @@ def test_get_organization_conformance_pack_detailed_status():
status = response["OrganizationConformancePackDetailedStatuses"][0]
status["AccountId"].should.equal(ACCOUNT_ID)
status["ConformancePackName"].should.equal(
"OrgConformsPack-{}".format(arn[arn.rfind("/") + 1 :])
f"OrgConformsPack-{arn[arn.rfind('/') + 1 :]}"
)
status["Status"].should.equal("CREATE_SUCCESSFUL")
update_time = status["LastUpdateTime"]
@ -2085,7 +2077,7 @@ def test_get_organization_conformance_pack_detailed_status():
status = response["OrganizationConformancePackDetailedStatuses"][0]
status["AccountId"].should.equal(ACCOUNT_ID)
status["ConformancePackName"].should.equal(
"OrgConformsPack-{}".format(arn[arn.rfind("/") + 1 :])
f"OrgConformsPack-{arn[arn.rfind('/') + 1 :]}"
)
status["Status"].should.equal("UPDATE_SUCCESSFUL")
status["LastUpdateTime"].should.be.greater_than(update_time)
@ -258,11 +258,7 @@ def test_access_denied_with_no_policy():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID,
user_name=user_name,
operation="ec2:DescribeInstances",
)
f"User: arn:aws:iam::{ACCOUNT_ID}:user/{user_name} is not authorized to perform: ec2:DescribeInstances"
)


@ -288,11 +284,7 @@ def test_access_denied_with_not_allowing_policy():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID,
user_name=user_name,
operation="ec2:DescribeInstances",
)
f"User: arn:aws:iam::{ACCOUNT_ID}:user/{user_name} is not authorized to perform: ec2:DescribeInstances"
)


@ -324,9 +316,7 @@ def test_access_denied_for_run_instances():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:RunInstances"
)
f"User: arn:aws:iam::{ACCOUNT_ID}:user/{user_name} is not authorized to perform: ec2:RunInstances"
)


@ -355,9 +345,7 @@ def test_access_denied_with_denying_policy():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:CreateVpc"
)
f"User: arn:aws:iam::{ACCOUNT_ID}:user/{user_name} is not authorized to perform: ec2:CreateVpc"
)


@ -535,9 +523,7 @@ def test_access_denied_with_many_irrelevant_policies():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:iam::{account_id}:user/{user_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID, user_name=user_name, operation="ec2:CreateKeyPair"
)
f"User: arn:aws:iam::{ACCOUNT_ID}:user/{user_name} is not authorized to perform: ec2:CreateKeyPair"
)
@ -552,9 +538,7 @@ def test_allowed_with_temporary_credentials():
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::{account_id}:root".format(account_id=ACCOUNT_ID)
},
"Principal": {"AWS": f"arn:aws:iam::{ACCOUNT_ID}:root"},
"Action": "sts:AssumeRole",
},
}
@ -607,9 +591,7 @@ def test_access_denied_with_temporary_credentials():
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::{account_id}:root".format(account_id=ACCOUNT_ID)
},
"Principal": {"AWS": f"arn:aws:iam::{ACCOUNT_ID}:root"},
"Action": "sts:AssumeRole",
},
}
@ -638,12 +620,7 @@ def test_access_denied_with_temporary_credentials():
ex.value.response["Error"]["Code"].should.equal("AccessDenied")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(403)
ex.value.response["Error"]["Message"].should.equal(
"User: arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name} is not authorized to perform: {operation}".format(
account_id=ACCOUNT_ID,
role_name=role_name,
session_name=session_name,
operation="rds:CreateDBInstance",
)
f"User: arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{session_name} is not authorized to perform: rds:CreateDBInstance"
)


@ -753,9 +730,7 @@ def test_s3_invalid_token_with_temporary_credentials():
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::{account_id}:root".format(account_id=ACCOUNT_ID)
},
"Principal": {"AWS": f"arn:aws:iam::{ACCOUNT_ID}:root"},
"Action": "sts:AssumeRole",
},
}
@ -11,13 +11,13 @@ else:

@mock_ec2
def test_latest_meta_data():
res = requests.get("{0}/latest/meta-data/".format(BASE_URL))
res = requests.get(f"{BASE_URL}/latest/meta-data/")
res.content.should.equal(b"iam")


@mock_ec2
def test_meta_data_iam():
res = requests.get("{0}/latest/meta-data/iam".format(BASE_URL))
res = requests.get(f"{BASE_URL}/latest/meta-data/iam")
json_response = res.json()
default_role = json_response["security-credentials"]["default-role"]
default_role.should.contain("AccessKeyId")
@ -28,16 +28,14 @@ def test_meta_data_iam():

@mock_ec2
def test_meta_data_security_credentials():
res = requests.get(
"{0}/latest/meta-data/iam/security-credentials/".format(BASE_URL)
)
res = requests.get(f"{BASE_URL}/latest/meta-data/iam/security-credentials/")
res.content.should.equal(b"default-role")


@mock_ec2
def test_meta_data_default_role():
res = requests.get(
"{0}/latest/meta-data/iam/security-credentials/default-role".format(BASE_URL)
f"{BASE_URL}/latest/meta-data/iam/security-credentials/default-role"
)
json_response = res.json()
json_response.should.contain("AccessKeyId")
@ -21,7 +21,7 @@ def test_reset_api():
conn.create_queue(QueueName="queue1")
conn.list_queues()["QueueUrls"].should.have.length_of(1)

res = requests.post("{base_url}/moto-api/reset".format(base_url=base_url))
res = requests.post(f"{base_url}/moto-api/reset")
res.content.should.equal(b'{"status": "ok"}')

conn.list_queues().shouldnt.contain("QueueUrls") # No more queues
@ -32,7 +32,7 @@ def test_data_api():
conn = boto3.client("sqs", region_name="us-west-1")
conn.create_queue(QueueName="queue1")

res = requests.post("{base_url}/moto-api/data.json".format(base_url=base_url))
res = requests.post(f"{base_url}/moto-api/data.json")
queues = res.json()["sqs"]["Queue"]
len(queues).should.equal(1)
queue = queues[0]
@ -8,7 +8,7 @@ from moto import mock_s3, mock_sts, mock_sqs, settings

@mock_sqs
@pytest.mark.network
def test_passthrough_requests():
def test_passthrough_requests() -> None:
conn = boto3.client("sqs", region_name="us-west-1")
conn.create_queue(QueueName="queue1")

@ -19,7 +19,7 @@ def test_passthrough_requests():
if not settings.TEST_SERVER_MODE:

@mock_sqs
def test_requests_to_amazon_subdomains_dont_work():
def test_requests_to_amazon_subdomains_dont_work() -> None:
res = requests.get("https://fakeservice.amazonaws.com/foo/bar")
assert res.content == b"The method is not implemented"
assert res.status_code == 400
@ -27,7 +27,7 @@ if not settings.TEST_SERVER_MODE:

@mock_sts
@mock_s3
def test_decorator_ordering():
def test_decorator_ordering() -> None:
"""
https://github.com/spulec/moto/issues/3790#issuecomment-803979809
"""
@ -49,4 +49,4 @@ def test_decorator_ordering():
)

resp = requests.get(presigned_url)
resp.status_code.should.equal(200)
resp.status_code.should.equal(200) # type: ignore[attr-defined]
@ -19,11 +19,11 @@ class TestMockBucketStartingWithServiceName:
@pytest.mark.parametrize("service_name,decorator", service_names)
def test_bucketname_starting_with_service_name(self, service_name, decorator):

decorator = getattr(moto, "mock_{}".format(service_name))
decorator = getattr(moto, f"mock_{service_name}")
with decorator():
with mock_s3():
s3_client = boto3.client("s3", "eu-west-1")
bucket_name = "{}-bucket".format(service_name)
bucket_name = f"{service_name}-bucket"
s3_client.create_bucket(
ACL="private",
Bucket=bucket_name,
@ -157,7 +157,7 @@ def test_listing_pipelines_boto3():
def test_listing_paginated_pipelines_boto3():
conn = boto3.client("datapipeline", region_name="us-west-2")
for i in range(100):
conn.create_pipeline(name="mypipeline%d" % i, uniqueId="some-unique-id%d" % i)
conn.create_pipeline(name=f"mypipeline{i}", uniqueId=f"some-unique-id{i}")

response = conn.list_pipelines()
@ -145,7 +145,7 @@ def test_list_table_tags_paginated():
|
||||
table_description = conn.describe_table(TableName=name)
|
||||
arn = table_description["Table"]["TableArn"]
|
||||
for i in range(11):
|
||||
tags = [{"Key": "TestTag%d" % i, "Value": "TestValue"}]
|
||||
tags = [{"Key": f"TestTag{i}", "Value": "TestValue"}]
|
||||
conn.tag_resource(ResourceArn=arn, Tags=tags)
|
||||
resp = conn.list_tags_of_resource(ResourceArn=arn)
|
||||
assert len(resp["Tags"]) == 10
|
||||
@ -3861,7 +3861,7 @@ def test_transact_write_items_put_conditional_expressions():
|
||||
{
|
||||
"Put": {
|
||||
"Item": {
|
||||
"id": {"S": "foo{}".format(str(i))},
|
||||
"id": {"S": f"foo{i}"},
|
||||
"foo": {"S": "bar"},
|
||||
},
|
||||
"TableName": "test-table",
|
||||
@ -4285,9 +4285,7 @@ def assert_correct_client_error(
|
||||
braces = braces or ["{", "}"]
|
||||
assert client_error.response["Error"]["Code"] == code
|
||||
if message_values is not None:
|
||||
values_string = "{open_brace}(?P<values>.*){close_brace}".format(
|
||||
open_brace=braces[0], close_brace=braces[1]
|
||||
)
|
||||
values_string = f"{braces[0]}(?P<values>.*){braces[1]}"
|
||||
re_msg = re.compile(message_template.format(values=values_string))
|
||||
match_result = re_msg.match(client_error.response["Error"]["Message"])
|
||||
assert match_result is not None
|
||||
@ -4886,7 +4884,7 @@ def test_set_attribute_is_dropped_if_empty_after_update_expression(attr_name):
|
||||
client.update_item(
|
||||
TableName=table_name,
|
||||
Key={"customer": {"S": item_key}},
|
||||
UpdateExpression="ADD {} :order".format(attr_name),
|
||||
UpdateExpression=f"ADD {attr_name} :order",
|
||||
ExpressionAttributeNames=expression_attribute_names,
|
||||
ExpressionAttributeValues={":order": {"SS": [set_item]}},
|
||||
)
|
||||
@ -4898,7 +4896,7 @@ def test_set_attribute_is_dropped_if_empty_after_update_expression(attr_name):
|
||||
client.update_item(
|
||||
TableName=table_name,
|
||||
Key={"customer": {"S": item_key}},
|
||||
UpdateExpression="DELETE {} :order".format(attr_name),
|
||||
UpdateExpression=f"DELETE {attr_name} :order",
|
||||
ExpressionAttributeNames=expression_attribute_names,
|
||||
ExpressionAttributeValues={":order": {"SS": [set_item]}},
|
||||
)
|
||||
@ -5193,7 +5191,7 @@ def test_describe_backup_for_non_existent_backup_raises_error():
|
||||
client.describe_backup(BackupArn=non_existent_arn)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("BackupNotFoundException")
|
||||
error["Message"].should.equal("Backup not found: {}".format(non_existent_arn))
|
||||
error["Message"].should.equal(f"Backup not found: {non_existent_arn}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5280,7 +5278,7 @@ def test_restore_table_from_non_existent_backup_raises_error():
|
||||
)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("BackupNotFoundException")
|
||||
error["Message"].should.equal("Backup not found: {}".format(non_existent_arn))
|
||||
error["Message"].should.equal(f"Backup not found: {non_existent_arn}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5301,7 +5299,7 @@ def test_restore_table_from_backup_raises_error_when_table_already_exists():
|
||||
)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("TableAlreadyExistsException")
|
||||
error["Message"].should.equal("Table already exists: {}".format(table_name))
|
||||
error["Message"].should.equal(f"Table already exists: {table_name}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5316,7 +5314,7 @@ def test_restore_table_from_backup():
|
||||
)
|
||||
table = resp.get("TableDescription")
|
||||
for i in range(5):
|
||||
client.put_item(TableName=table_name, Item={"id": {"S": "item %d" % i}})
|
||||
client.put_item(TableName=table_name, Item={"id": {"S": f"item {i}"}})
|
||||
|
||||
backup_arn = (
|
||||
client.create_backup(TableName=table_name, BackupName="backup")
|
||||
@ -5356,7 +5354,7 @@ def test_restore_table_to_point_in_time():
|
||||
)
|
||||
table = resp.get("TableDescription")
|
||||
for i in range(5):
|
||||
client.put_item(TableName=table_name, Item={"id": {"S": "item %d" % i}})
|
||||
client.put_item(TableName=table_name, Item={"id": {"S": f"item {i}"}})
|
||||
|
||||
restored_table_name = "restored-from-pit"
|
||||
restored = client.restore_table_to_point_in_time(
|
||||
@ -5385,7 +5383,7 @@ def test_restore_table_to_point_in_time_raises_error_when_source_not_exist():
|
||||
)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("SourceTableNotFoundException")
|
||||
error["Message"].should.equal("Source table not found: %s" % table_name)
|
||||
error["Message"].should.equal(f"Source table not found: {table_name}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5411,7 +5409,7 @@ def test_restore_table_to_point_in_time_raises_error_when_dest_exist():
|
||||
)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("TableAlreadyExistsException")
|
||||
error["Message"].should.equal("Table already exists: %s" % restored_table_name)
|
||||
error["Message"].should.equal(f"Table already exists: {restored_table_name}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5422,7 +5420,7 @@ def test_delete_non_existent_backup_raises_error():
|
||||
client.delete_backup(BackupArn=non_existent_arn)
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("BackupNotFoundException")
|
||||
error["Message"].should.equal("Backup not found: {}".format(non_existent_arn))
|
||||
error["Message"].should.equal(f"Backup not found: {non_existent_arn}")
|
||||
|
||||
|
||||
@mock_dynamodb
|
||||
@ -5511,7 +5509,7 @@ def test_describe_endpoints(region):
|
||||
res.should.equal(
|
||||
[
|
||||
{
|
||||
"Address": "dynamodb.{}.amazonaws.com".format(region),
|
||||
"Address": f"dynamodb.{region}.amazonaws.com",
|
||||
"CachePeriodInMinutes": 1440,
|
||||
},
|
||||
]
|
||||
|
@ -253,7 +253,7 @@ def test_execution_of_remove_in_list(table):
|
||||
@pytest.mark.parametrize("attr_name", ["s", "#placeholder"])
|
||||
def test_execution_of_delete_element_from_set(table, attr_name):
|
||||
expression_attribute_names = {"#placeholder": "s"}
|
||||
update_expression = "delete {} :value".format(attr_name)
|
||||
update_expression = f"delete {attr_name} :value"
|
||||
update_expression_ast = UpdateExpressionParser.make(update_expression)
|
||||
item = Item(
|
||||
hash_key=DynamoType({"S": "id"}),
|
||||
@ -276,7 +276,7 @@ def test_execution_of_delete_element_from_set(table, attr_name):
|
||||
assert expected_item == item
|
||||
|
||||
# delete last elements
|
||||
update_expression = "delete {} :value".format(attr_name)
|
||||
update_expression = f"delete {attr_name} :value"
|
||||
update_expression_ast = UpdateExpressionParser.make(update_expression)
|
||||
validated_ast = UpdateExpressionValidator(
|
||||
update_expression_ast,
|
||||
|
@ -192,9 +192,7 @@ def test_boto3_conditions_ignorecase():
|
||||
for expr in between_expressions:
|
||||
results = dynamodb.query(
|
||||
TableName="users",
|
||||
KeyConditionExpression="forum_name = :forum_name and subject {}".format(
|
||||
expr
|
||||
),
|
||||
KeyConditionExpression=f"forum_name = :forum_name and subject {expr}",
|
||||
ExpressionAttributeValues={
|
||||
":forum_name": {"S": "the-key"},
|
||||
":start": {"S": "100"},
|
||||
@ -1023,7 +1021,7 @@ def test_query_pagination():
|
||||
table.put_item(
|
||||
Item={
|
||||
"forum_name": "the-key",
|
||||
"subject": "{0}".format(i),
|
||||
"subject": f"{i}",
|
||||
"username": "johndoe",
|
||||
"created": Decimal("3"),
|
||||
}
|
||||
|
@ -505,7 +505,7 @@ def test_update_settype_item_with_conditions():
|
||||
def test_scan_pagination():
|
||||
table = _create_user_table()
|
||||
|
||||
expected_usernames = ["user{0}".format(i) for i in range(10)]
|
||||
expected_usernames = [f"user{i}" for i in range(10)]
|
||||
for u in expected_usernames:
|
||||
table.put_item(Item={"username": u})
|
||||
|
||||
|
@ -1319,7 +1319,7 @@ def create_table(test_client, name=None, region=None, use_range_key=True):
|
||||
"Content-Type": "application/x-amz-json-1.0",
|
||||
}
|
||||
if region:
|
||||
headers["Host"] = "dynamodb.{}.amazonaws.com".format(region)
|
||||
headers["Host"] = f"dynamodb.{region}.amazonaws.com"
|
||||
request_body = {
|
||||
"TableName": name,
|
||||
"KeySchema": {
|
||||
|
@ -599,8 +599,8 @@ def test_ami_attribute_user_permissions():
|
||||
]
|
||||
permissions.should.equal([])
|
||||
|
||||
USER1 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER2 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER1 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
USER2 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
|
||||
ADD_USERS_ARGS = {
|
||||
"ImageId": image.id,
|
||||
@ -673,7 +673,7 @@ def test_ami_describe_executable_users():
|
||||
instance_id = response["Reservations"][0]["Instances"][0]["InstanceId"]
|
||||
image_id = conn.create_image(InstanceId=instance_id, Name="TestImage")["ImageId"]
|
||||
|
||||
USER1 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER1 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
|
||||
ADD_USER_ARGS = {
|
||||
"ImageId": image_id,
|
||||
@ -706,8 +706,8 @@ def test_ami_describe_executable_users_negative():
|
||||
instance_id = response["Reservations"][0]["Instances"][0]["InstanceId"]
|
||||
image_id = conn.create_image(InstanceId=instance_id, Name="TestImage")["ImageId"]
|
||||
|
||||
USER1 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER2 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER1 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
USER2 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
|
||||
ADD_USER_ARGS = {
|
||||
"ImageId": image_id,
|
||||
@ -742,7 +742,7 @@ def test_ami_describe_executable_users_and_filter():
|
||||
"ImageId"
|
||||
]
|
||||
|
||||
USER1 = "".join(["{}".format(random.randint(0, 9)) for _ in range(0, 12)])
|
||||
USER1 = "".join([f"{random.randint(0, 9)}" for _ in range(0, 12)])
|
||||
|
||||
ADD_USER_ARGS = {
|
||||
"ImageId": image_id,
|
||||
@ -1127,7 +1127,7 @@ def test_ami_filter_by_empty_tag():
|
||||
for i in range(10):
|
||||
image = client.create_image(
|
||||
InstanceId=instance.instance_id,
|
||||
Name="MyAMI{}".format(i),
|
||||
Name=f"MyAMI{i}",
|
||||
Description="Test",
|
||||
)
|
||||
|
||||
|
@ -107,9 +107,7 @@ def test_delete_attached_volume():
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Code"].should.equal("VolumeInUse")
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"Volume {0} is currently attached to {1}".format(
|
||||
volume.id, instance["InstanceId"]
|
||||
)
|
||||
f"Volume {volume.id} is currently attached to {instance['InstanceId']}"
|
||||
)
|
||||
|
||||
volume.detach_from_instance(InstanceId=instance["InstanceId"])
|
||||
|
@ -228,9 +228,7 @@ def test_instance_detach_volume_wrong_path():
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidAttachment.NotFound")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.value.response["Error"]["Message"].should.equal(
|
||||
"The volume {0} is not attached to instance {1} as device {2}".format(
|
||||
volume.volume_id, instance.instance_id, "/dev/sdf"
|
||||
)
|
||||
f"The volume {volume.volume_id} is not attached to instance {instance.instance_id} as device /dev/sdf"
|
||||
)
|
||||
|
||||
|
||||
@ -2120,9 +2118,7 @@ def test_describe_instance_attribute():
|
||||
)
|
||||
ex.value.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
message = "Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute.".format(
|
||||
invalid_instance_attribute=invalid_instance_attribute
|
||||
)
|
||||
message = f"Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute."
|
||||
ex.value.response["Error"]["Message"].should.equal(message)
|
||||
|
||||
|
||||
@ -2227,7 +2223,7 @@ def test_instance_termination_protection():
|
||||
error = ex.value.response["Error"]
|
||||
error["Code"].should.equal("OperationNotPermitted")
|
||||
ex.value.response["Error"]["Message"].should.match(
|
||||
r"The instance '{}' may not be terminated.*$".format(instance_id)
|
||||
rf"The instance '{instance_id}' may not be terminated.*$"
|
||||
)
|
||||
|
||||
# Use alternate request syntax for setting attribute.
|
||||
|
@ -304,9 +304,7 @@ def test_duplicate_network_acl_entry():
|
||||
)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (NetworkAclEntryAlreadyExists) when calling the CreateNetworkAclEntry "
|
||||
"operation: The network acl entry identified by {} already exists.".format(
|
||||
rule_number
|
||||
)
|
||||
f"operation: The network acl entry identified by {rule_number} already exists."
|
||||
)
|
||||
|
||||
|
||||
|
@ -15,9 +15,7 @@ def test_create():
|
||||
prefix_list.should.have.key("AddressFamily").equals("?")
|
||||
prefix_list.should.have.key("State").equals("create-complete")
|
||||
prefix_list.should.have.key("PrefixListArn").equals(
|
||||
"arn:aws:ec2:us-west-1:{}:prefix-list/{}".format(
|
||||
ACCOUNT_ID, prefix_list["PrefixListId"]
|
||||
)
|
||||
f"arn:aws:ec2:us-west-1:{ACCOUNT_ID}:prefix-list/{prefix_list['PrefixListId']}"
|
||||
)
|
||||
prefix_list.should.have.key("PrefixListName").equals("examplelist")
|
||||
prefix_list.should.have.key("MaxEntries").equals(2)
|
||||
|
@ -63,10 +63,10 @@ def test_add_servers_to_multiple_regions_boto3():
|
||||
def test_create_autoscaling_group_boto3():
|
||||
regions = [("us-east-1", "c"), ("ap-northeast-1", "a")]
|
||||
for region, zone in regions:
|
||||
a_zone = "{}{}".format(region, zone)
|
||||
asg_name = "{}_tester_group_{}".format(region, str(uuid4())[0:6])
|
||||
lb_name = "{}_lb_{}".format(region, str(uuid4())[0:6])
|
||||
config_name = "{}_tester_{}".format(region, str(uuid4())[0:6])
|
||||
a_zone = f"{region}{zone}"
|
||||
asg_name = f"{region}_tester_group_{str(uuid4())[0:6]}"
|
||||
lb_name = f"{region}_lb_{str(uuid4())[0:6]}"
|
||||
config_name = f"{region}_tester_{str(uuid4())[0:6]}"
|
||||
|
||||
elb_client = boto3.client("elb", region_name=region)
|
||||
elb_client.create_load_balancer(
|
||||
|
@ -818,9 +818,7 @@ def test_create_route_with_invalid_destination_cidr_block_parameter():
|
||||
)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateRoute "
|
||||
"operation: Value ({}) for parameter destinationCidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
destination_cidr_block
|
||||
)
|
||||
f"operation: Value ({destination_cidr_block}) for parameter destinationCidrBlock is invalid. This is not a valid CIDR block."
|
||||
)
|
||||
|
||||
route_table.create_route(
|
||||
|
@ -589,7 +589,7 @@ def test_sec_group_rule_limit(use_vpc):
|
||||
ip_permissions = [
|
||||
{
|
||||
"IpProtocol": "-1",
|
||||
"IpRanges": [{"CidrIp": "{}.0.0.0/0".format(i)} for i in range(110)],
|
||||
"IpRanges": [{"CidrIp": f"{i}.0.0.0/0"} for i in range(110)],
|
||||
}
|
||||
]
|
||||
client.authorize_security_group_ingress(
|
||||
@ -621,7 +621,7 @@ def test_sec_group_rule_limit(use_vpc):
|
||||
permissions = [
|
||||
{
|
||||
"IpProtocol": "-1",
|
||||
"IpRanges": [{"CidrIp": "{}.0.0.0/0".format(i)} for i in range(limit - 1)],
|
||||
"IpRanges": [{"CidrIp": f"{i}.0.0.0/0"} for i in range(limit - 1)],
|
||||
}
|
||||
]
|
||||
client.authorize_security_group_ingress(GroupId=sg.id, IpPermissions=permissions)
|
||||
@ -654,9 +654,7 @@ def test_sec_group_rule_limit(use_vpc):
|
||||
permissions = [
|
||||
{
|
||||
"IpProtocol": "-1",
|
||||
"IpRanges": [
|
||||
{"CidrIp": "{}.0.0.0/0".format(i)} for i in range(1, limit - 1)
|
||||
],
|
||||
"IpRanges": [{"CidrIp": f"{i}.0.0.0/0"} for i in range(1, limit - 1)],
|
||||
}
|
||||
]
|
||||
client.authorize_security_group_egress(GroupId=sg.id, IpPermissions=permissions)
|
||||
@ -1187,7 +1185,7 @@ def test_update_security_group_rule_descriptions_ingress():
|
||||
def test_non_existent_security_group_raises_error_on_authorize():
|
||||
client = boto3.client("ec2", "us-east-1")
|
||||
non_existent_sg = "sg-123abc"
|
||||
expected_error = "The security group '{}' does not exist".format(non_existent_sg)
|
||||
expected_error = f"The security group '{non_existent_sg}' does not exist"
|
||||
authorize_funcs = [
|
||||
client.authorize_security_group_egress,
|
||||
client.authorize_security_group_ingress,
|
||||
|
@ -22,7 +22,7 @@ def spot_config(subnet_id, allocation_strategy="lowestPrice"):
|
||||
"ClientToken": "string",
|
||||
"SpotPrice": "0.12",
|
||||
"TargetCapacity": 6,
|
||||
"IamFleetRole": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID),
|
||||
"IamFleetRole": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet",
|
||||
"LaunchSpecifications": [
|
||||
{
|
||||
"ImageId": EXAMPLE_AMI_ID,
|
||||
@ -47,9 +47,7 @@ def spot_config(subnet_id, allocation_strategy="lowestPrice"):
|
||||
],
|
||||
"Monitoring": {"Enabled": True},
|
||||
"SubnetId": subnet_id,
|
||||
"IamInstanceProfile": {
|
||||
"Arn": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)
|
||||
},
|
||||
"IamInstanceProfile": {"Arn": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"},
|
||||
"EbsOptimized": False,
|
||||
"WeightedCapacity": 2.0,
|
||||
"SpotPrice": "0.13",
|
||||
@ -62,9 +60,7 @@ def spot_config(subnet_id, allocation_strategy="lowestPrice"):
|
||||
"InstanceType": "t2.large",
|
||||
"Monitoring": {"Enabled": True},
|
||||
"SubnetId": subnet_id,
|
||||
"IamInstanceProfile": {
|
||||
"Arn": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)
|
||||
},
|
||||
"IamInstanceProfile": {"Arn": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"},
|
||||
"EbsOptimized": False,
|
||||
"WeightedCapacity": 4.0,
|
||||
"SpotPrice": "10.00",
|
||||
@ -96,7 +92,7 @@ def test_create_spot_fleet_with_lowest_price():
|
||||
spot_fleet_config["SpotPrice"].should.equal("0.12")
|
||||
spot_fleet_config["TargetCapacity"].should.equal(6)
|
||||
spot_fleet_config["IamFleetRole"].should.equal(
|
||||
"arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"
|
||||
)
|
||||
spot_fleet_config["AllocationStrategy"].should.equal("lowestPrice")
|
||||
spot_fleet_config["FulfilledCapacity"].should.equal(6.0)
|
||||
@ -107,7 +103,7 @@ def test_create_spot_fleet_with_lowest_price():
|
||||
launch_spec["EbsOptimized"].should.equal(False)
|
||||
launch_spec["SecurityGroups"].should.equal([{"GroupId": "sg-123"}])
|
||||
launch_spec["IamInstanceProfile"].should.equal(
|
||||
{"Arn": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)}
|
||||
{"Arn": f"arn:aws:iam::{ACCOUNT_ID}:role/fleet"}
|
||||
)
|
||||
launch_spec["ImageId"].should.equal(EXAMPLE_AMI_ID)
|
||||
launch_spec["InstanceType"].should.equal("t2.small")
|
||||
|
@ -356,11 +356,7 @@ def test_create_subnet_response_fields():
|
||||
subnet.should.have.key("AssignIpv6AddressOnCreation").which.should.equal(False)
|
||||
subnet.should.have.key("Ipv6Native").which.should.equal(False)
|
||||
|
||||
subnet_arn = "arn:aws:ec2:{region}:{owner_id}:subnet/{subnet_id}".format(
|
||||
region=subnet["AvailabilityZone"][0:-1],
|
||||
owner_id=subnet["OwnerId"],
|
||||
subnet_id=subnet["SubnetId"],
|
||||
)
|
||||
subnet_arn = f"arn:aws:ec2:{subnet['AvailabilityZone'][0:-1]}:{subnet['OwnerId']}:subnet/{subnet['SubnetId']}"
|
||||
subnet.should.have.key("SubnetArn").which.should.equal(subnet_arn)
|
||||
subnet.should.have.key("Ipv6CidrBlockAssociationSet").which.should.equal([])
|
||||
|
||||
@ -393,11 +389,7 @@ def test_describe_subnet_response_fields():
|
||||
subnet.should.have.key("AssignIpv6AddressOnCreation").which.should.equal(False)
|
||||
subnet.should.have.key("Ipv6Native").which.should.equal(False)
|
||||
|
||||
subnet_arn = "arn:aws:ec2:{region}:{owner_id}:subnet/{subnet_id}".format(
|
||||
region=subnet["AvailabilityZone"][0:-1],
|
||||
owner_id=subnet["OwnerId"],
|
||||
subnet_id=subnet["SubnetId"],
|
||||
)
|
||||
subnet_arn = f"arn:aws:ec2:{subnet['AvailabilityZone'][0:-1]}:{subnet['OwnerId']}:subnet/{subnet['SubnetId']}"
|
||||
subnet.should.have.key("SubnetArn").which.should.equal(subnet_arn)
|
||||
subnet.should.have.key("Ipv6CidrBlockAssociationSet").which.should.equal([])
|
||||
|
||||
@ -418,9 +410,7 @@ def test_create_subnet_with_invalid_availability_zone():
|
||||
)
|
||||
assert str(ex.value).startswith(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
|
||||
"operation: Value ({}) for parameter availabilityZone is invalid. Subnets can currently only be created in the following availability zones: ".format(
|
||||
subnet_availability_zone
|
||||
)
|
||||
f"operation: Value ({subnet_availability_zone}) for parameter availabilityZone is invalid. Subnets can currently only be created in the following availability zones: "
|
||||
)
|
||||
|
||||
|
||||
@ -437,7 +427,7 @@ def test_create_subnet_with_invalid_cidr_range():
|
||||
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
|
||||
f"operation: The CIDR '{subnet_cidr_block}' is invalid."
|
||||
)
|
||||
|
||||
|
||||
@ -455,7 +445,7 @@ def test_create_subnet_with_invalid_cidr_range_multiple_vpc_cidr_blocks():
|
||||
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
|
||||
f"operation: The CIDR '{subnet_cidr_block}' is invalid."
|
||||
)
|
||||
|
||||
|
||||
@ -472,9 +462,7 @@ def test_create_subnet_with_invalid_cidr_block_parameter():
|
||||
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
|
||||
"operation: Value ({}) for parameter cidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
subnet_cidr_block
|
||||
)
|
||||
f"operation: Value ({subnet_cidr_block}) for parameter cidrBlock is invalid. This is not a valid CIDR block."
|
||||
)
|
||||
|
||||
|
||||
@ -532,9 +520,7 @@ def test_create_subnets_with_overlapping_cidr_blocks():
|
||||
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidSubnet.Conflict) when calling the CreateSubnet "
|
||||
"operation: The CIDR '{}' conflicts with another subnet".format(
|
||||
subnet_cidr_block
|
||||
)
|
||||
f"operation: The CIDR '{subnet_cidr_block}' conflicts with another subnet"
|
||||
)
|
||||
|
||||
|
||||
|
@ -131,7 +131,7 @@ def test_tag_limit_exceeded():
instance = ec2.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)[0]
tag_list = []
for i in range(51):
tag_list.append({"Key": "{0:02d}".format(i + 1), "Value": ""})
tag_list.append({"Key": f"{i+1:02d}", "Value": ""})

with pytest.raises(ClientError) as ex:
client.create_tags(Resources=[instance.id], Tags=tag_list)
@ -48,9 +48,7 @@ def test_create_transit_gateway():
|
||||
gateways.should.have.length_of(1)
|
||||
gateways[0].should.have.key("CreationTime")
|
||||
gateways[0].should.have.key("TransitGatewayArn").equal(
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/{}".format(
|
||||
ACCOUNT_ID, gateway["TransitGatewayId"]
|
||||
)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/{gateway['TransitGatewayId']}"
|
||||
)
|
||||
gateways[0]["Options"].should.have.key("AssociationDefaultRouteTableId").equal(
|
||||
gateways[0]["Options"]["PropagationDefaultRouteTableId"]
|
||||
|
@ -399,9 +399,9 @@ def test_vpc_peering_connections_cross_region_accept_wrong_region():
|
||||
ec2_usw1.accept_vpc_peering_connection(VpcPeeringConnectionId=vpc_pcx_usw1.id)
|
||||
cm.value.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
exp_msg = (
|
||||
"Incorrect region ({0}) specified for this request.VPC "
|
||||
"peering connection {1} must be "
|
||||
"accepted in region {2}".format("us-west-1", vpc_pcx_usw1.id, "ap-northeast-1")
|
||||
"Incorrect region (us-west-1) specified for this request.VPC "
|
||||
f"peering connection {vpc_pcx_usw1.id} must be "
|
||||
"accepted in region ap-northeast-1"
|
||||
)
|
||||
cm.value.response["Error"]["Message"].should.equal(exp_msg)
|
||||
|
||||
@ -424,9 +424,9 @@ def test_vpc_peering_connections_cross_region_reject_wrong_region():
|
||||
ec2_usw1.reject_vpc_peering_connection(VpcPeeringConnectionId=vpc_pcx_usw1.id)
|
||||
cm.value.response["Error"]["Code"].should.equal("OperationNotPermitted")
|
||||
exp_msg = (
|
||||
"Incorrect region ({0}) specified for this request.VPC "
|
||||
"peering connection {1} must be accepted or "
|
||||
"rejected in region {2}".format("us-west-1", vpc_pcx_usw1.id, "ap-northeast-1")
|
||||
"Incorrect region (us-west-1) specified for this request.VPC "
|
||||
f"peering connection {vpc_pcx_usw1.id} must be accepted or "
|
||||
"rejected in region ap-northeast-1"
|
||||
)
|
||||
cm.value.response["Error"]["Message"].should.equal(exp_msg)
|
||||
|
||||
|
@ -561,14 +561,12 @@ def test_associate_vpc_ipv4_cidr_block():
|
||||
# Associate/Extend vpc CIDR range up to 5 ciders
|
||||
for i in range(43, 47):
|
||||
response = ec2.meta.client.associate_vpc_cidr_block(
|
||||
VpcId=vpc.id, CidrBlock="10.10.{}.0/24".format(i)
|
||||
VpcId=vpc.id, CidrBlock=f"10.10.{i}.0/24"
|
||||
)
|
||||
response["CidrBlockAssociation"]["CidrBlockState"]["State"].should.equal(
|
||||
"associating"
|
||||
)
|
||||
response["CidrBlockAssociation"]["CidrBlock"].should.equal(
|
||||
"10.10.{}.0/24".format(i)
|
||||
)
|
||||
response["CidrBlockAssociation"]["CidrBlock"].should.equal(f"10.10.{i}.0/24")
|
||||
response["CidrBlockAssociation"]["AssociationId"].should.contain(
|
||||
"vpc-cidr-assoc"
|
||||
)
|
||||
@ -590,9 +588,7 @@ def test_associate_vpc_ipv4_cidr_block():
|
||||
)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (CidrLimitExceeded) when calling the AssociateVpcCidrBlock "
|
||||
"operation: This network '{}' has met its maximum number of allowed CIDRs: 5".format(
|
||||
vpc.id
|
||||
)
|
||||
f"operation: This network '{vpc.id}' has met its maximum number of allowed CIDRs: 5"
|
||||
)
|
||||
|
||||
|
||||
@ -657,8 +653,8 @@ def test_disassociate_vpc_ipv4_cidr_block():
|
||||
)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (OperationNotPermitted) when calling the DisassociateVpcCidrBlock operation: "
|
||||
"The vpc CIDR block with association ID {} may not be disassociated. It is the primary "
|
||||
"IPv4 CIDR block of the VPC".format(vpc_base_cidr_assoc_id)
|
||||
f"The vpc CIDR block with association ID {vpc_base_cidr_assoc_id} may not be disassociated. It is the primary "
|
||||
"IPv4 CIDR block of the VPC"
|
||||
)
|
||||
|
||||
|
||||
@ -738,9 +734,7 @@ def test_vpc_associate_ipv6_cidr_block():
|
||||
)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (CidrLimitExceeded) when calling the AssociateVpcCidrBlock "
|
||||
"operation: This network '{}' has met its maximum number of allowed CIDRs: 1".format(
|
||||
vpc.id
|
||||
)
|
||||
f"operation: This network '{vpc.id}' has met its maximum number of allowed CIDRs: 1"
|
||||
)
|
||||
|
||||
# Test associate ipv6 cidr block after vpc created
|
||||
@ -848,9 +842,7 @@ def test_create_vpc_with_invalid_cidr_block_parameter():
|
||||
ec2.create_vpc(CidrBlock=vpc_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidParameterValue) when calling the CreateVpc "
|
||||
"operation: Value ({}) for parameter cidrBlock is invalid. This is not a valid CIDR block.".format(
|
||||
vpc_cidr_block
|
||||
)
|
||||
f"operation: Value ({vpc_cidr_block}) for parameter cidrBlock is invalid. This is not a valid CIDR block."
|
||||
)
|
||||
|
||||
|
||||
@ -863,7 +855,7 @@ def test_create_vpc_with_invalid_cidr_range():
|
||||
ec2.create_vpc(CidrBlock=vpc_cidr_block)
|
||||
str(ex.value).should.equal(
|
||||
"An error occurred (InvalidVpc.Range) when calling the CreateVpc "
|
||||
"operation: The CIDR '{}' is invalid.".format(vpc_cidr_block)
|
||||
f"operation: The CIDR '{vpc_cidr_block}' is invalid."
|
||||
)
|
||||
|
||||
|
||||
|
@ -22,8 +22,8 @@ from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID

def _create_image_digest(contents=None):
if not contents:
contents = "docker_image{0}".format(int(random() * 10**6))
return "sha256:%s" % hashlib.sha256(contents.encode("utf-8")).hexdigest()
contents = f"docker_image{int(random() * 10**6)}"
return "sha256:" + hashlib.sha256(contents.encode("utf-8")).hexdigest()


def _create_image_manifest():
@ -130,7 +130,7 @@ def test_put_account_setting_changes_service_arn():
|
||||
response = client.list_services(cluster="dummy-cluster", launchType="FARGATE")
|
||||
service_arn = response["serviceArns"][0]
|
||||
service_arn.should.equal(
|
||||
"arn:aws:ecs:eu-west-1:{}:service/test-ecs-service".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:eu-west-1:{ACCOUNT_ID}:service/test-ecs-service"
|
||||
)
|
||||
|
||||
# Second invocation returns long ARN's by default, after deleting the preference
|
||||
@ -138,9 +138,7 @@ def test_put_account_setting_changes_service_arn():
|
||||
response = client.list_services(cluster="dummy-cluster", launchType="FARGATE")
|
||||
service_arn = response["serviceArns"][0]
|
||||
service_arn.should.equal(
|
||||
"arn:aws:ecs:eu-west-1:{}:service/dummy-cluster/test-ecs-service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:eu-west-1:{ACCOUNT_ID}:service/dummy-cluster/test-ecs-service"
|
||||
)
|
||||
|
||||
|
||||
|
@ -30,7 +30,7 @@ def test_create_cluster():
|
||||
response = client.create_cluster(clusterName="test_ecs_cluster")
|
||||
response["cluster"]["clusterName"].should.equal("test_ecs_cluster")
|
||||
response["cluster"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["cluster"]["status"].should.equal("ACTIVE")
|
||||
response["cluster"]["registeredContainerInstancesCount"].should.equal(0)
|
||||
@ -60,10 +60,10 @@ def test_list_clusters():
|
||||
_ = client.create_cluster(clusterName="test_cluster1")
|
||||
response = client.list_clusters()
|
||||
response["clusterArns"].should.contain(
|
||||
"arn:aws:ecs:us-east-2:{}:cluster/test_cluster0".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-2:{ACCOUNT_ID}:cluster/test_cluster0"
|
||||
)
|
||||
response["clusterArns"].should.contain(
|
||||
"arn:aws:ecs:us-east-2:{}:cluster/test_cluster1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-2:{ACCOUNT_ID}:cluster/test_cluster1"
|
||||
)
|
||||
|
||||
|
||||
@ -104,7 +104,7 @@ def test_describe_clusters_missing():
|
||||
response = client.describe_clusters(clusters=["some-cluster"])
|
||||
response["failures"].should.contain(
|
||||
{
|
||||
"arn": "arn:aws:ecs:us-east-1:{}:cluster/some-cluster".format(ACCOUNT_ID),
|
||||
"arn": f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/some-cluster",
|
||||
"reason": "MISSING",
|
||||
}
|
||||
)
|
||||
@ -117,7 +117,7 @@ def test_delete_cluster():
|
||||
response = client.delete_cluster(cluster="test_ecs_cluster")
|
||||
response["cluster"]["clusterName"].should.equal("test_ecs_cluster")
|
||||
response["cluster"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["cluster"]["status"].should.equal("ACTIVE")
|
||||
response["cluster"]["registeredContainerInstancesCount"].should.equal(0)
|
||||
@ -154,7 +154,7 @@ def test_register_task_definition():
|
||||
response["taskDefinition"]["family"].should.equal("test_ecs_task")
|
||||
response["taskDefinition"]["revision"].should.equal(1)
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["taskDefinition"]["networkMode"].should.equal("bridge")
|
||||
response["taskDefinition"]["volumes"].should.equal([])
|
||||
@ -192,7 +192,7 @@ def test_register_task_definition():
|
||||
|
||||
response["taskDefinition"]["revision"].should.equal(2)
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:2".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:2"
|
||||
)
|
||||
|
||||
# Registering with optional top-level params
|
||||
@ -293,10 +293,10 @@ def test_list_task_definitions():
|
||||
response = client.list_task_definitions()
|
||||
len(response["taskDefinitionArns"]).should.equal(2)
|
||||
response["taskDefinitionArns"][0].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["taskDefinitionArns"][1].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:2".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:2"
|
||||
)
|
||||
|
||||
|
||||
@ -356,10 +356,10 @@ def test_list_task_definitions_with_family_prefix():
|
||||
filtered_response = client.list_task_definitions(familyPrefix="test_ecs_task_a")
|
||||
len(filtered_response["taskDefinitionArns"]).should.equal(2)
|
||||
filtered_response["taskDefinitionArns"][0].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task_a:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task_a:1"
|
||||
)
|
||||
filtered_response["taskDefinitionArns"][1].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task_a:2".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task_a:2"
|
||||
)
|
||||
|
||||
|
||||
@ -419,12 +419,12 @@ def test_describe_task_definitions():
|
||||
)
|
||||
response = client.describe_task_definition(taskDefinition="test_ecs_task")
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:3".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:3"
|
||||
)
|
||||
|
||||
response = client.describe_task_definition(taskDefinition="test_ecs_task:2")
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:2".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:2"
|
||||
)
|
||||
response["taskDefinition"]["taskRoleArn"].should.equal("my-task-role-arn")
|
||||
response["taskDefinition"]["executionRoleArn"].should.equal("my-execution-role-arn")
|
||||
@ -458,7 +458,7 @@ def test_deregister_task_definition_1():
|
||||
type(response["taskDefinition"]).should.be(dict)
|
||||
response["taskDefinition"]["status"].should.equal("INACTIVE")
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["taskDefinition"]["containerDefinitions"][0]["name"].should.equal(
|
||||
"hello_world"
|
||||
@ -526,7 +526,7 @@ def test_create_service():
|
||||
desiredCount=2,
|
||||
)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["service"]["desiredCount"].should.equal(2)
|
||||
len(response["service"]["events"]).should.equal(0)
|
||||
@ -534,14 +534,12 @@ def test_create_service():
|
||||
response["service"]["pendingCount"].should.equal(0)
|
||||
response["service"]["runningCount"].should.equal(0)
|
||||
response["service"]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service"
|
||||
)
|
||||
response["service"]["serviceName"].should.equal("test_ecs_service")
|
||||
response["service"]["status"].should.equal("ACTIVE")
|
||||
response["service"]["taskDefinition"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["service"]["schedulingStrategy"].should.equal("REPLICA")
|
||||
response["service"]["launchType"].should.equal("EC2")
|
||||
@ -618,7 +616,7 @@ def test_create_service_scheduling_strategy():
|
||||
schedulingStrategy="DAEMON",
|
||||
)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["service"]["desiredCount"].should.equal(2)
|
||||
len(response["service"]["events"]).should.equal(0)
|
||||
@ -626,14 +624,12 @@ def test_create_service_scheduling_strategy():
|
||||
response["service"]["pendingCount"].should.equal(0)
|
||||
response["service"]["runningCount"].should.equal(0)
|
||||
response["service"]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service"
|
||||
)
|
||||
response["service"]["serviceName"].should.equal("test_ecs_service")
|
||||
response["service"]["status"].should.equal("ACTIVE")
|
||||
response["service"]["taskDefinition"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["service"]["schedulingStrategy"].should.equal("DAEMON")
|
||||
|
||||
@ -684,16 +680,8 @@ def test_list_services():
|
||||
desiredCount=2,
|
||||
)
|
||||
|
||||
test_ecs_service1_arn = (
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster1/test_ecs_service1".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
)
|
||||
test_ecs_service2_arn = (
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster1/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
)
|
||||
test_ecs_service1_arn = f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster1/test_ecs_service1"
|
||||
test_ecs_service2_arn = f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster1/test_ecs_service2"
|
||||
|
||||
cluster1_services = client.list_services(cluster="test_ecs_cluster1")
|
||||
len(cluster1_services["serviceArns"]).should.equal(2)
|
||||
@ -766,22 +754,16 @@ def test_describe_services():
|
||||
cluster="test_ecs_cluster",
|
||||
services=[
|
||||
"test_ecs_service1",
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service2",
|
||||
],
|
||||
)
|
||||
len(response["services"]).should.equal(2)
|
||||
response["services"][0]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service1".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service1"
|
||||
)
|
||||
response["services"][0]["serviceName"].should.equal("test_ecs_service1")
|
||||
response["services"][1]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service2"
|
||||
)
|
||||
response["services"][1]["serviceName"].should.equal("test_ecs_service2")
|
||||
|
||||
@ -802,9 +784,7 @@ def test_describe_services():
|
||||
cluster="test_ecs_cluster",
|
||||
services=[
|
||||
"test_ecs_service1",
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service2",
|
||||
],
|
||||
include=["TAGS"],
|
||||
)
|
||||
@ -840,9 +820,7 @@ def test_describe_services_new_arn():
|
||||
cluster="test_ecs_cluster", services=["test_ecs_service1"]
|
||||
)
|
||||
response["services"][0]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service1".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service1"
|
||||
)
|
||||
|
||||
|
||||
@ -889,23 +867,17 @@ def test_describe_services_scheduling_strategy():
|
||||
cluster="test_ecs_cluster",
|
||||
services=[
|
||||
"test_ecs_service1",
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service2",
|
||||
"test_ecs_service3",
|
||||
],
|
||||
)
|
||||
len(response["services"]).should.equal(3)
|
||||
response["services"][0]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service1".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service1"
|
||||
)
|
||||
response["services"][0]["serviceName"].should.equal("test_ecs_service1")
|
||||
response["services"][1]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service2".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service2"
|
||||
)
|
||||
response["services"][1]["serviceName"].should.equal("test_ecs_service2")
|
||||
|
||||
@ -971,7 +943,7 @@ def test_describe_services_with_known_unknown_services():
|
||||
service_name,
|
||||
"unknown",
|
||||
service_arn,
|
||||
"arn:aws:ecs:eu-central-1:{}:service/unknown-2".format(ACCOUNT_ID),
|
||||
f"arn:aws:ecs:eu-central-1:{ACCOUNT_ID}:service/unknown-2",
|
||||
],
|
||||
)
|
||||
|
||||
@ -987,13 +959,11 @@ def test_describe_services_with_known_unknown_services():
|
||||
sorted(failures, key=lambda item: item["arn"]).should.equal(
|
||||
[
|
||||
{
|
||||
"arn": "arn:aws:ecs:eu-central-1:{}:service/unknown".format(ACCOUNT_ID),
|
||||
"arn": f"arn:aws:ecs:eu-central-1:{ACCOUNT_ID}:service/unknown",
|
||||
"reason": "MISSING",
|
||||
},
|
||||
{
|
||||
"arn": "arn:aws:ecs:eu-central-1:{}:service/unknown-2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"arn": f"arn:aws:ecs:eu-central-1:{ACCOUNT_ID}:service/unknown-2",
|
||||
"reason": "MISSING",
|
||||
},
|
||||
]
|
||||
@ -1093,7 +1063,7 @@ def test_delete_service():
|
||||
cluster="test_ecs_cluster", service="test_ecs_service"
|
||||
)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["service"]["desiredCount"].should.equal(0)
|
||||
len(response["service"]["events"]).should.equal(0)
|
||||
@ -1101,15 +1071,13 @@ def test_delete_service():
|
||||
response["service"]["pendingCount"].should.equal(0)
|
||||
response["service"]["runningCount"].should.equal(0)
|
||||
response["service"]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service"
|
||||
)
|
||||
response["service"]["serviceName"].should.equal("test_ecs_service")
|
||||
response["service"]["status"].should.equal("ACTIVE")
|
||||
response["service"]["schedulingStrategy"].should.equal("REPLICA")
|
||||
response["service"]["taskDefinition"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
|
||||
|
||||
@ -1146,7 +1114,7 @@ def test_delete_service__using_arns():
|
||||
)
|
||||
response = client.delete_service(cluster=cluster_arn, service=service_arn)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
|
||||
|
||||
@ -1180,22 +1148,20 @@ def test_delete_service_force():
|
||||
cluster="test_ecs_cluster", service="test_ecs_service", force=True
|
||||
)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
len(response["service"]["events"]).should.equal(0)
|
||||
len(response["service"]["loadBalancers"]).should.equal(0)
|
||||
response["service"]["pendingCount"].should.equal(0)
|
||||
response["service"]["runningCount"].should.equal(0)
|
||||
response["service"]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service"
|
||||
)
|
||||
response["service"]["serviceName"].should.equal("test_ecs_service")
|
||||
response["service"]["status"].should.equal("ACTIVE")
|
||||
response["service"]["schedulingStrategy"].should.equal("REPLICA")
|
||||
response["service"]["taskDefinition"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
|
||||
|
||||
@ -1277,9 +1243,7 @@ def test_register_container_instance():
|
||||
response["containerInstance"]["ec2InstanceId"].should.equal(test_instance.id)
|
||||
full_arn = response["containerInstance"]["containerInstanceArn"]
|
||||
arn_part = full_arn.split("/")
|
||||
arn_part[0].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:container-instance".format(ACCOUNT_ID)
|
||||
)
|
||||
arn_part[0].should.equal(f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:container-instance")
|
||||
arn_part[1].should.equal("test_ecs_cluster")
|
||||
arn_part[2].should.equal(str(UUID(arn_part[2])))
|
||||
response["containerInstance"]["status"].should.equal("ACTIVE")
|
||||
@ -1686,16 +1650,16 @@ def test_run_task():
|
||||
)
|
||||
len(response["tasks"]).should.equal(2)
|
||||
response["tasks"][0]["taskArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:task/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task/"
|
||||
)
|
||||
response["tasks"][0]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["tasks"][0]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["tasks"][0]["containerInstanceArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:container-instance/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:container-instance/"
|
||||
)
|
||||
response["tasks"][0]["overrides"].should.equal({})
|
||||
response["tasks"][0]["lastStatus"].should.equal("RUNNING")
|
||||
@ -1835,16 +1799,16 @@ def test_run_task_default_cluster():
|
||||
len(response["tasks"]).should.equal(2)
|
||||
response["tasks"][0].should.have.key("launchType").equals("FARGATE")
|
||||
response["tasks"][0]["taskArn"].should.match(
|
||||
"arn:aws:ecs:us-east-1:{}:task/default/[a-z0-9-]+$".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task/default/[a-z0-9-]+$"
|
||||
)
|
||||
response["tasks"][0]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/default".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/default"
|
||||
)
|
||||
response["tasks"][0]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["tasks"][0]["containerInstanceArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:container-instance/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:container-instance/"
|
||||
)
|
||||
response["tasks"][0]["overrides"].should.equal({})
|
||||
response["tasks"][0]["lastStatus"].should.equal("RUNNING")
|
||||
@ -1975,18 +1939,16 @@ def test_start_task():
|
||||
|
||||
len(response["tasks"]).should.equal(1)
|
||||
response["tasks"][0]["taskArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:task/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task/"
|
||||
)
|
||||
response["tasks"][0]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["tasks"][0]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["tasks"][0]["containerInstanceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{0}:container-instance/test_ecs_cluster/{1}".format(
|
||||
ACCOUNT_ID, container_instance_id
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:container-instance/test_ecs_cluster/{container_instance_id}"
|
||||
)
|
||||
response["tasks"][0]["overrides"].should.equal({})
|
||||
response["tasks"][0]["lastStatus"].should.equal("RUNNING")
|
||||
@ -2190,7 +2152,7 @@ def test_describe_task_definition_by_family():
|
||||
)
|
||||
)
|
||||
task["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
task["volumes"].should.equal([])
|
||||
task["status"].should.equal("ACTIVE")
|
||||
@ -2508,16 +2470,16 @@ def test_task_definitions_with_port_clash():
|
||||
)
|
||||
len(response["tasks"]).should.equal(1)
|
||||
response["tasks"][0]["taskArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:task/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task/"
|
||||
)
|
||||
response["tasks"][0]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["tasks"][0]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
response["tasks"][0]["containerInstanceArn"].should.contain(
|
||||
"arn:aws:ecs:us-east-1:{}:container-instance/".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:container-instance/"
|
||||
)
|
||||
response["tasks"][0]["overrides"].should.equal({})
|
||||
response["tasks"][0]["lastStatus"].should.equal("RUNNING")
|
||||
@ -2857,7 +2819,7 @@ def test_create_service_load_balancing():
|
||||
],
|
||||
)
|
||||
response["service"]["clusterArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:cluster/test_ecs_cluster".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:cluster/test_ecs_cluster"
|
||||
)
|
||||
response["service"]["desiredCount"].should.equal(2)
|
||||
len(response["service"]["events"]).should.equal(0)
|
||||
@ -2875,14 +2837,12 @@ def test_create_service_load_balancing():
|
||||
response["service"]["pendingCount"].should.equal(0)
|
||||
response["service"]["runningCount"].should.equal(0)
|
||||
response["service"]["serviceArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:service/test_ecs_cluster/test_ecs_service".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:service/test_ecs_cluster/test_ecs_service"
|
||||
)
|
||||
response["service"]["serviceName"].should.equal("test_ecs_service")
|
||||
response["service"]["status"].should.equal("ACTIVE")
|
||||
response["service"]["taskDefinition"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
|
||||
|
||||
@ -2912,7 +2872,7 @@ def test_list_tags_for_resource():
|
||||
type(response["taskDefinition"]).should.be(dict)
|
||||
response["taskDefinition"]["revision"].should.equal(1)
|
||||
response["taskDefinition"]["taskDefinitionArn"].should.equal(
|
||||
"arn:aws:ecs:us-east-1:{}:task-definition/test_ecs_task:1".format(ACCOUNT_ID)
|
||||
f"arn:aws:ecs:us-east-1:{ACCOUNT_ID}:task-definition/test_ecs_task:1"
|
||||
)
|
||||
|
||||
task_definition_arn = response["taskDefinition"]["taskDefinitionArn"]
|
||||
@ -3255,7 +3215,7 @@ def test_create_task_set():
|
||||
)["services"][0]["serviceArn"]
|
||||
task_set["clusterArn"].should.equal(cluster_arn)
|
||||
task_set["serviceArn"].should.equal(service_arn)
|
||||
task_set["taskDefinition"].should.match("{0}:1$".format(task_def_name))
|
||||
task_set["taskDefinition"].should.match(f"{task_def_name}:1$")
|
||||
task_set["scale"].should.equal({"value": 100.0, "unit": "PERCENT"})
|
||||
task_set["loadBalancers"][0]["targetGroupArn"].should.equal(
|
||||
"arn:aws:elasticloadbalancing:us-east-1:01234567890:targetgroup/"
|
||||
@ -3384,12 +3344,12 @@ def test_describe_task_sets():
|
||||
|
||||
task_sets[0].should.have.key("tags")
|
||||
task_sets.should.have.length_of(1)
|
||||
task_sets[0]["taskDefinition"].should.match("{0}:1$".format(task_def_name))
|
||||
task_sets[0]["taskDefinition"].should.match(f"{task_def_name}:1$")
|
||||
task_sets[0]["clusterArn"].should.equal(cluster_arn)
|
||||
task_sets[0]["serviceArn"].should.equal(service_arn)
|
||||
task_sets[0]["serviceArn"].should.match("{0}$".format(service_name))
|
||||
task_sets[0]["serviceArn"].should.match(f"{service_name}$")
|
||||
task_sets[0]["scale"].should.equal({"value": 100.0, "unit": "PERCENT"})
|
||||
task_sets[0]["taskSetArn"].should.match("{0}$".format(task_sets[0]["id"]))
|
||||
task_sets[0]["taskSetArn"].should.match(f"{task_sets[0]['id']}$")
|
||||
task_sets[0]["loadBalancers"][0]["targetGroupArn"].should.equal(
|
||||
"arn:aws:elasticloadbalancing:us-east-1:01234567890:targetgroup/"
|
||||
"c26b93c1bc35466ba792d5b08fe6a5bc/ec39113f8831453a"
|
||||
|
@ -262,7 +262,7 @@ def test_describe_file_systems_aws_create_sample_2(efs):
|
||||
def test_describe_file_systems_paging(efs):
|
||||
# Create several file systems.
|
||||
for i in range(10):
|
||||
efs.create_file_system(CreationToken="foobar_{}".format(i))
|
||||
efs.create_file_system(CreationToken=f"foobar_{i}")
|
||||
|
||||
# First call (Start)
|
||||
# ------------------
|
||||
|
@ -91,9 +91,9 @@ def test_create_mount_target_aws_sample_2(efs, ec2, file_system, subnet):
|
||||
ip_addr = ip_addr_obj.exploded
|
||||
break
|
||||
else:
|
||||
assert False, "Could not generate an IP address from CIDR block: {}".format(
|
||||
subnet["CidrBlock"]
|
||||
)
|
||||
assert (
|
||||
False
|
||||
), f"Could not generate an IP address from CIDR block: {subnet['CidrBlock']}"
|
||||
desc_sg_resp = ec2.describe_security_groups()
|
||||
security_group = desc_sg_resp["SecurityGroups"][0]
|
||||
security_group_id = security_group["GroupId"]
|
||||
@ -216,8 +216,8 @@ def test_create_mount_target_too_many_security_groups(efs, ec2, file_system, sub
|
||||
for i in range(6):
|
||||
sg_info = ec2.create_security_group(
|
||||
VpcId=subnet["VpcId"],
|
||||
GroupName="sg-{}".format(i),
|
||||
Description="SG-{} protects us from the Goa'uld.".format(i),
|
||||
GroupName=f"sg-{i}",
|
||||
Description=f"SG-{i} protects us from the Goa'uld.",
|
||||
)
|
||||
sg_id_list.append(sg_info["GroupId"])
|
||||
with pytest.raises(ClientError) as exc_info:
|
||||
|
@ -61,7 +61,7 @@ def test_efs_file_system_describe(efs_client):
|
||||
|
||||
|
||||
def test_efs_file_system_delete(file_system_id, efs_client):
|
||||
res = efs_client.delete("/2015-02-01/file-systems/{}".format(file_system_id))
|
||||
res = efs_client.delete(f"/2015-02-01/file-systems/{file_system_id}")
|
||||
assert res.status_code == 204
|
||||
|
||||
|
||||
@ -84,12 +84,10 @@ def test_efs_mount_target_delete(file_system_id, subnet_id, efs_client):
|
||||
json={"FileSystemId": file_system_id, "SubnetId": subnet_id},
|
||||
)
|
||||
mt_id = create_res.json["MountTargetId"]
|
||||
res = efs_client.delete("/2015-02-01/mount-targets/{}".format(mt_id))
|
||||
res = efs_client.delete(f"/2015-02-01/mount-targets/{mt_id}")
|
||||
assert res.status_code == 204
|
||||
|
||||
|
||||
def test_efs_describe_backup_policy(file_system_id, efs_client):
|
||||
res = efs_client.get(
|
||||
"/2015-02-01/file-systems/{}/backup-policy".format(file_system_id)
|
||||
)
|
||||
res = efs_client.get(f"/2015-02-01/file-systems/{file_system_id}/backup-policy")
|
||||
assert res.status_code == 200
|
||||
|
@ -60,7 +60,7 @@ def generate_fargate_profiles(client, cluster_name, num_profiles, minimal):
|
||||
client.create_fargate_profile(
|
||||
fargateProfileName=generate_random_name(),
|
||||
clusterName=cluster_name,
|
||||
**_input_builder(FargateProfileInputs, minimal)
|
||||
**_input_builder(FargateProfileInputs, minimal),
|
||||
)[ResponseAttributes.FARGATE_PROFILE][
|
||||
FargateProfileAttributes.FARGATE_PROFILE_NAME
|
||||
]
|
||||
@ -78,17 +78,14 @@ def generate_nodegroups(client, cluster_name, num_nodegroups, minimal):
|
||||
client.create_nodegroup(
|
||||
nodegroupName=generate_random_name(),
|
||||
clusterName=cluster_name,
|
||||
**_input_builder(NodegroupInputs, minimal)
|
||||
**_input_builder(NodegroupInputs, minimal),
|
||||
)[ResponseAttributes.NODEGROUP][NodegroupAttributes.NODEGROUP_NAME]
|
||||
for _ in range(num_nodegroups)
|
||||
]
|
||||
|
||||
|
||||
def generate_dict(prefix, count):
|
||||
return {
|
||||
"{prefix}_{count}".format(prefix=prefix, count=_count): str(_count)
|
||||
for _count in range(count)
|
||||
}
|
||||
return {f"{prefix}_{_count}": str(_count) for _count in range(count)}
|
||||
|
||||
|
||||
def is_valid_uri(value):
|
||||
|
@ -24,9 +24,7 @@ def test_create_simple_pipeline():
|
||||
pipeline.should.have.key("Id")
|
||||
pipeline.should.have.key("Name").being.equal("testpipeline")
|
||||
pipeline.should.have.key("Arn").being.equal(
|
||||
"arn:aws:elastictranscoder:{}:{}:pipeline/{}".format(
|
||||
region, ACCOUNT_ID, pipeline["Id"]
|
||||
)
|
||||
f"arn:aws:elastictranscoder:{region}:{ACCOUNT_ID}:pipeline/{pipeline['Id']}"
|
||||
)
|
||||
pipeline.should.have.key("Status").being.equal("Active")
|
||||
pipeline.should.have.key("InputBucket").being.equal("inputtest")
|
||||
@ -64,9 +62,7 @@ def test_create_pipeline_with_content_config():
|
||||
pipeline.should.have.key("Id")
|
||||
pipeline.should.have.key("Name").being.equal("testpipeline")
|
||||
pipeline.should.have.key("Arn").being.equal(
|
||||
"arn:aws:elastictranscoder:{}:{}:pipeline/{}".format(
|
||||
region, ACCOUNT_ID, pipeline["Id"]
|
||||
)
|
||||
f"arn:aws:elastictranscoder:{region}:{ACCOUNT_ID}:pipeline/{pipeline['Id']}"
|
||||
)
|
||||
pipeline.should.have.key("Status").being.equal("Active")
|
||||
pipeline.should.have.key("InputBucket").being.equal("inputtest")
|
||||
@ -186,9 +182,7 @@ def test_list_pipelines():
|
||||
pipeline.should.have.key("Id")
|
||||
pipeline.should.have.key("Name").being.equal("testpipeline")
|
||||
pipeline.should.have.key("Arn").being.equal(
|
||||
"arn:aws:elastictranscoder:{}:{}:pipeline/{}".format(
|
||||
region, ACCOUNT_ID, pipeline["Id"]
|
||||
)
|
||||
f"arn:aws:elastictranscoder:{region}:{ACCOUNT_ID}:pipeline/{pipeline['Id']}"
|
||||
)
|
||||
pipeline.should.have.key("Status").being.equal("Active")
|
||||
pipeline.should.have.key("InputBucket").being.equal("inputtest")
|
||||
@ -227,9 +221,7 @@ def test_read_pipeline():
|
||||
pipeline.should.have.key("Id")
|
||||
pipeline.should.have.key("Name").being.equal("testpipeline")
|
||||
pipeline.should.have.key("Arn").being.equal(
|
||||
"arn:aws:elastictranscoder:{}:{}:pipeline/{}".format(
|
||||
region, ACCOUNT_ID, pipeline["Id"]
|
||||
)
|
||||
f"arn:aws:elastictranscoder:{region}:{ACCOUNT_ID}:pipeline/{pipeline['Id']}"
|
||||
)
|
||||
pipeline.should.have.key("Status").being.equal("Active")
|
||||
pipeline.should.have.key("InputBucket").being.equal("inputtest")
|
||||
@ -271,9 +263,7 @@ def test_read_nonexisting_pipeline_format():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceNotFoundException")
|
||||
err["Message"].should.equal(
|
||||
"The specified pipeline was not found: account={}, pipelineId={}.".format(
|
||||
ACCOUNT_ID, pipeline_id
|
||||
)
|
||||
f"The specified pipeline was not found: account={ACCOUNT_ID}, pipelineId={pipeline_id}."
|
||||
)
|
||||
|
||||
|
||||
@ -298,9 +288,7 @@ def test_update_pipeline_name():
|
||||
pipeline.should.have.key("Id")
|
||||
pipeline.should.have.key("Name").being.equal("newtestpipeline")
|
||||
pipeline.should.have.key("Arn").being.equal(
|
||||
"arn:aws:elastictranscoder:{}:{}:pipeline/{}".format(
|
||||
region, ACCOUNT_ID, pipeline["Id"]
|
||||
)
|
||||
f"arn:aws:elastictranscoder:{region}:{ACCOUNT_ID}:pipeline/{pipeline['Id']}"
|
||||
)
|
||||
pipeline.should.have.key("Status").being.equal("Active")
|
||||
pipeline.should.have.key("InputBucket").being.equal("inputtest")
|
||||
@ -369,9 +357,7 @@ def test_update_nonexisting_pipeline():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceNotFoundException")
|
||||
err["Message"].should.equal(
|
||||
"The specified pipeline was not found: account={}, pipelineId={}.".format(
|
||||
ACCOUNT_ID, pipeline_id
|
||||
)
|
||||
f"The specified pipeline was not found: account={ACCOUNT_ID}, pipelineId={pipeline_id}."
|
||||
)
|
||||
|
||||
|
||||
@ -395,4 +381,4 @@ def test_delete_pipeline():
|
||||
|
||||
|
||||
def create_role_name(name):
|
||||
return "arn:aws:iam::{}:role/{}".format(ACCOUNT_ID, name)
|
||||
return f"arn:aws:iam::{ACCOUNT_ID}:role/{name}"
|
||||
|
@ -248,7 +248,7 @@ def test_describe_paginated_balancers():
|
||||
|
||||
for i in range(51):
|
||||
client.create_load_balancer(
|
||||
LoadBalancerName="my-lb%d" % i,
|
||||
LoadBalancerName=f"my-lb{i}",
|
||||
Listeners=[
|
||||
{"Protocol": "tcp", "LoadBalancerPort": 80, "InstancePort": 8080}
|
||||
],
|
||||
|
@ -399,7 +399,7 @@ def test_describe_paginated_balancers():
|
||||
|
||||
for i in range(51):
|
||||
conn.create_load_balancer(
|
||||
Name="my-lb%d" % i,
|
||||
Name=f"my-lb{i}",
|
||||
Subnets=[subnet1.id, subnet2.id],
|
||||
SecurityGroups=[security_group.id],
|
||||
Scheme="internal",
|
||||
@ -1685,9 +1685,7 @@ def test_cognito_action_listener_rule():
|
||||
action = {
|
||||
"Type": "authenticate-cognito",
|
||||
"AuthenticateCognitoConfig": {
|
||||
"UserPoolArn": "arn:aws:cognito-idp:us-east-1:{}:userpool/us-east-1_ABCD1234".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"UserPoolArn": f"arn:aws:cognito-idp:us-east-1:{ACCOUNT_ID}:userpool/us-east-1_ABCD1234",
|
||||
"UserPoolClientId": "abcd1234abcd",
|
||||
"UserPoolDomain": "testpool",
|
||||
"AuthenticationRequestExtraParams": {"param": "test"},
|
||||
|
@ -144,9 +144,7 @@ def test_cognito_action_listener_rule_cloudformation():
|
||||
{
|
||||
"Type": "authenticate-cognito",
|
||||
"AuthenticateCognitoConfig": {
|
||||
"UserPoolArn": "arn:aws:cognito-idp:us-east-1:{}:userpool/us-east-1_ABCD1234".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"UserPoolArn": f"arn:aws:cognito-idp:us-east-1:{ACCOUNT_ID}:userpool/us-east-1_ABCD1234",
|
||||
"UserPoolClientId": "abcd1234abcd",
|
||||
"UserPoolDomain": "testpool",
|
||||
},
|
||||
@ -175,9 +173,7 @@ def test_cognito_action_listener_rule_cloudformation():
|
||||
{
|
||||
"Type": "authenticate-cognito",
|
||||
"AuthenticateCognitoConfig": {
|
||||
"UserPoolArn": "arn:aws:cognito-idp:us-east-1:{}:userpool/us-east-1_ABCD1234".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"UserPoolArn": f"arn:aws:cognito-idp:us-east-1:{ACCOUNT_ID}:userpool/us-east-1_ABCD1234",
|
||||
"UserPoolClientId": "abcd1234abcd",
|
||||
"UserPoolDomain": "testpool",
|
||||
},
|
||||
|
@ -131,11 +131,7 @@ def test_create_target_group_and_listeners():
|
||||
Protocol="HTTPS",
|
||||
Port=443,
|
||||
Certificates=[
|
||||
{
|
||||
"CertificateArn": "arn:aws:iam:{}:server-certificate/test-cert".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
}
|
||||
{"CertificateArn": f"arn:aws:iam:{ACCOUNT_ID}:server-certificate/test-cert"}
|
||||
],
|
||||
DefaultActions=[actions],
|
||||
)
|
||||
@ -143,13 +139,7 @@ def test_create_target_group_and_listeners():
|
||||
listener.get("Port").should.equal(443)
|
||||
listener.get("Protocol").should.equal("HTTPS")
|
||||
listener.get("Certificates").should.equal(
|
||||
[
|
||||
{
|
||||
"CertificateArn": "arn:aws:iam:{}:server-certificate/test-cert".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
}
|
||||
]
|
||||
[{"CertificateArn": f"arn:aws:iam:{ACCOUNT_ID}:server-certificate/test-cert"}]
|
||||
)
|
||||
listener.get("DefaultActions").should.equal(
|
||||
[{"TargetGroupArn": target_group_arn, "Type": "forward"}]
|
||||
|
@ -183,9 +183,7 @@ def test_describe_cluster():
|
||||
cl["TerminationProtected"].should.equal(False)
|
||||
cl["VisibleToAllUsers"].should.equal(True)
|
||||
cl["ClusterArn"].should.equal(
|
||||
"arn:aws:elasticmapreduce:{0}:{1}:cluster/{2}".format(
|
||||
region_name, ACCOUNT_ID, cluster_id
|
||||
)
|
||||
f"arn:aws:elasticmapreduce:{region_name}:{ACCOUNT_ID}:cluster/{cluster_id}"
|
||||
)
|
||||
|
||||
|
||||
@ -396,7 +394,7 @@ def test_run_job_flow():
|
||||
args = deepcopy(run_job_flow_args)
|
||||
resp = client.run_job_flow(**args)
|
||||
resp["ClusterArn"].startswith(
|
||||
"arn:aws:elasticmapreduce:{0}:{1}:cluster/".format(region_name, ACCOUNT_ID)
|
||||
f"arn:aws:elasticmapreduce:{region_name}:{ACCOUNT_ID}:cluster/"
|
||||
)
|
||||
job_flow_id = resp["JobFlowId"]
|
||||
resp = client.describe_job_flows(JobFlowIds=[job_flow_id])["JobFlows"][0]
|
||||
@ -584,9 +582,7 @@ def test_put_remove_auto_scaling_policy():
|
||||
del resp["AutoScalingPolicy"]["Status"]
|
||||
resp["AutoScalingPolicy"].should.equal(auto_scaling_policy_with_cluster_id)
|
||||
resp["ClusterArn"].should.equal(
|
||||
"arn:aws:elasticmapreduce:{0}:{1}:cluster/{2}".format(
|
||||
region_name, ACCOUNT_ID, cluster_id
|
||||
)
|
||||
f"arn:aws:elasticmapreduce:{region_name}:{ACCOUNT_ID}:cluster/{cluster_id}"
|
||||
)
|
||||
|
||||
core_instance_group = [
|
||||
|
@ -185,7 +185,7 @@ def test_describe_rule():
|
||||
|
||||
response["Name"].should.equal(rule_name)
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:events:us-west-2:{0}:rule/{1}".format(ACCOUNT_ID, rule_name)
|
||||
f"arn:aws:events:us-west-2:{ACCOUNT_ID}:rule/{rule_name}"
|
||||
)
|
||||
|
||||
|
||||
@ -201,7 +201,7 @@ def test_describe_rule_with_event_bus_name():
|
||||
EventPattern=json.dumps({"account": [ACCOUNT_ID]}),
|
||||
State="DISABLED",
|
||||
Description="test rule",
|
||||
RoleArn="arn:aws:iam::{}:role/test-role".format(ACCOUNT_ID),
|
||||
RoleArn=f"arn:aws:iam::{ACCOUNT_ID}:role/test-role",
|
||||
EventBusName=event_bus_name,
|
||||
)
|
||||
|
||||
@ -210,18 +210,14 @@ def test_describe_rule_with_event_bus_name():
|
||||
|
||||
# then
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:rule/{1}/{2}".format(
|
||||
ACCOUNT_ID, event_bus_name, rule_name
|
||||
)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/{event_bus_name}/{rule_name}"
|
||||
)
|
||||
response["CreatedBy"].should.equal(ACCOUNT_ID)
|
||||
response["Description"].should.equal("test rule")
|
||||
response["EventBusName"].should.equal(event_bus_name)
|
||||
json.loads(response["EventPattern"]).should.equal({"account": [ACCOUNT_ID]})
|
||||
response["Name"].should.equal(rule_name)
|
||||
response["RoleArn"].should.equal(
|
||||
"arn:aws:iam::{}:role/test-role".format(ACCOUNT_ID)
|
||||
)
|
||||
response["RoleArn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:role/test-role")
|
||||
response["State"].should.equal("DISABLED")
|
||||
|
||||
response.should_not.have.key("ManagedBy")
|
||||
@ -505,9 +501,7 @@ def test_put_targets_error_missing_parameter_sqs_fifo():
|
||||
Targets=[
|
||||
{
|
||||
"Id": "sqs-fifo",
|
||||
"Arn": "arn:aws:sqs:eu-central-1:{}:test-queue.fifo".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:sqs:eu-central-1:{ACCOUNT_ID}:test-queue.fifo",
|
||||
}
|
||||
],
|
||||
)
|
||||
@ -770,7 +764,7 @@ def test_create_event_bus():
|
||||
response = client.create_event_bus(Name="test-bus")
|
||||
|
||||
response["EventBusArn"].should.equal(
|
||||
"arn:aws:events:us-east-1:{}:event-bus/test-bus".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/test-bus"
|
||||
)
|
||||
|
||||
|
||||
@ -808,7 +802,7 @@ def test_describe_event_bus():
|
||||
|
||||
response["Name"].should.equal("default")
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:events:us-east-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/default"
|
||||
)
|
||||
response.should_not.have.key("Policy")
|
||||
|
||||
@ -824,7 +818,7 @@ def test_describe_event_bus():
|
||||
|
||||
response["Name"].should.equal("test-bus")
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:events:us-east-1:{}:event-bus/test-bus".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/test-bus"
|
||||
)
|
||||
json.loads(response["Policy"]).should.equal(
|
||||
{
|
||||
@ -835,9 +829,7 @@ def test_describe_event_bus():
|
||||
"Effect": "Allow",
|
||||
"Principal": {"AWS": "arn:aws:iam::111111111111:root"},
|
||||
"Action": "events:PutEvents",
|
||||
"Resource": "arn:aws:events:us-east-1:{}:event-bus/test-bus".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Resource": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/test-bus",
|
||||
}
|
||||
],
|
||||
}
|
||||
@ -868,33 +860,23 @@ def test_list_event_buses():
|
||||
[
|
||||
{
|
||||
"Name": "default",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/default",
|
||||
},
|
||||
{
|
||||
"Name": "other-bus-1",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/other-bus-1".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/other-bus-1",
|
||||
},
|
||||
{
|
||||
"Name": "other-bus-2",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/other-bus-2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/other-bus-2",
|
||||
},
|
||||
{
|
||||
"Name": "test-bus-1",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/test-bus-1".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/test-bus-1",
|
||||
},
|
||||
{
|
||||
"Name": "test-bus-2",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/test-bus-2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/test-bus-2",
|
||||
},
|
||||
]
|
||||
)
|
||||
@ -906,15 +888,11 @@ def test_list_event_buses():
|
||||
[
|
||||
{
|
||||
"Name": "other-bus-1",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/other-bus-1".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/other-bus-1",
|
||||
},
|
||||
{
|
||||
"Name": "other-bus-2",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/other-bus-2".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/other-bus-2",
|
||||
},
|
||||
]
|
||||
)
|
||||
@ -936,9 +914,7 @@ def test_delete_event_bus():
|
||||
[
|
||||
{
|
||||
"Name": "default",
|
||||
"Arn": "arn:aws:events:us-east-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"Arn": f"arn:aws:events:us-east-1:{ACCOUNT_ID}:event-bus/default",
|
||||
}
|
||||
]
|
||||
)
|
||||
@ -1016,9 +992,7 @@ def test_tag_resource_error_unknown_arn():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.tag_resource(
|
||||
ResourceARN="arn:aws:events:eu-central-1:{0}:rule/unknown".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
ResourceARN=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/unknown",
|
||||
Tags=[],
|
||||
)
|
||||
|
||||
@ -1040,9 +1014,7 @@ def test_untag_resource_error_unknown_arn():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.untag_resource(
|
||||
ResourceARN="arn:aws:events:eu-central-1:{0}:rule/unknown".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
ResourceARN=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/unknown",
|
||||
TagKeys=[],
|
||||
)
|
||||
|
||||
@ -1064,9 +1036,7 @@ def test_list_tags_for_resource_error_unknown_arn():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.list_tags_for_resource(
|
||||
ResourceARN="arn:aws:events:eu-central-1:{0}:rule/unknown".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
ResourceARN=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/unknown"
|
||||
)
|
||||
|
||||
# then
|
||||
@ -1088,24 +1058,22 @@ def test_create_archive():
|
||||
# when
|
||||
response = client.create_archive(
|
||||
ArchiveName=archive_name,
|
||||
EventSourceArn="arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default",
|
||||
)
|
||||
|
||||
# then
|
||||
response["ArchiveArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:archive/{1}".format(ACCOUNT_ID, archive_name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/{archive_name}"
|
||||
)
|
||||
response["CreationTime"].should.be.a(datetime)
|
||||
response["State"].should.equal("ENABLED")
|
||||
|
||||
# check for archive rule existence
|
||||
rule_name = "Events-Archive-{}".format(archive_name)
|
||||
rule_name = f"Events-Archive-{archive_name}"
|
||||
response = client.describe_rule(Name=rule_name)
|
||||
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:rule/{1}".format(ACCOUNT_ID, rule_name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/{rule_name}"
|
||||
)
|
||||
response["CreatedBy"].should.equal(ACCOUNT_ID)
|
||||
response["EventBusName"].should.equal("default")
|
||||
@ -1142,7 +1110,7 @@ def test_create_archive_custom_event_bus():
|
||||
|
||||
# then
|
||||
response["ArchiveArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{}:archive/test-archive".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/test-archive"
|
||||
)
|
||||
response["CreationTime"].should.be.a(datetime)
|
||||
response["State"].should.equal("ENABLED")
|
||||
@ -1159,7 +1127,7 @@ def test_create_archive_error_long_name():
|
||||
client.create_archive(
|
||||
ArchiveName=name,
|
||||
EventSourceArn=(
|
||||
"arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
),
|
||||
)
|
||||
|
||||
@ -1170,8 +1138,8 @@ def test_create_archive_error_long_name():
|
||||
ex.response["Error"]["Code"].should.contain("ValidationException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
" 1 validation error detected: "
|
||||
"Value '{}' at 'archiveName' failed to satisfy constraint: "
|
||||
"Member must have length less than or equal to 48".format(name)
|
||||
f"Value '{name}' at 'archiveName' failed to satisfy constraint: "
|
||||
"Member must have length less than or equal to 48"
|
||||
)
|
||||
|
||||
|
||||
@ -1185,7 +1153,7 @@ def test_create_archive_error_invalid_event_pattern():
|
||||
client.create_archive(
|
||||
ArchiveName="test-archive",
|
||||
EventSourceArn=(
|
||||
"arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
),
|
||||
EventPattern="invalid",
|
||||
)
|
||||
@ -1210,7 +1178,7 @@ def test_create_archive_error_invalid_event_pattern_not_an_array():
|
||||
client.create_archive(
|
||||
ArchiveName="test-archive",
|
||||
EventSourceArn=(
|
||||
"arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
),
|
||||
EventPattern=json.dumps(
|
||||
{
|
||||
@ -1243,9 +1211,7 @@ def test_create_archive_error_unknown_event_bus():
|
||||
client.create_archive(
|
||||
ArchiveName="test-archive",
|
||||
EventSourceArn=(
|
||||
"arn:aws:events:eu-central-1:{}:event-bus/{}".format(
|
||||
ACCOUNT_ID, event_bus_name
|
||||
)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/{event_bus_name}"
|
||||
),
|
||||
)
|
||||
|
||||
@ -1255,7 +1221,7 @@ def test_create_archive_error_unknown_event_bus():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Event bus {} does not exist.".format(event_bus_name)
|
||||
f"Event bus {event_bus_name} does not exist."
|
||||
)
|
||||
|
||||
|
||||
@ -1264,7 +1230,7 @@ def test_create_archive_error_duplicate():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-archive"
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
client.create_archive(ArchiveName=name, EventSourceArn=source_arn)
|
||||
|
||||
# when
|
||||
@ -1284,7 +1250,7 @@ def test_describe_archive():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-archive"
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
event_pattern = json.dumps({"key": ["value"]})
|
||||
client.create_archive(
|
||||
ArchiveName=name,
|
||||
@ -1298,7 +1264,7 @@ def test_describe_archive():
|
||||
|
||||
# then
|
||||
response["ArchiveArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:archive/{1}".format(ACCOUNT_ID, name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/{name}"
|
||||
)
|
||||
response["ArchiveName"].should.equal(name)
|
||||
response["CreationTime"].should.be.a(datetime)
|
||||
@ -1326,9 +1292,7 @@ def test_describe_archive_error_unknown_archive():
|
||||
ex.operation_name.should.equal("DescribeArchive")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Archive {} does not exist.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Archive {name} does not exist.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -1336,7 +1300,7 @@ def test_list_archives():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-archive"
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
event_pattern = json.dumps({"key": ["value"]})
|
||||
client.create_archive(
|
||||
ArchiveName=name,
|
||||
@ -1368,7 +1332,7 @@ def test_list_archives():
|
||||
def test_list_archives_with_name_prefix():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
client.create_archive(ArchiveName="test", EventSourceArn=source_arn)
|
||||
client.create_archive(ArchiveName="test-archive", EventSourceArn=source_arn)
|
||||
|
||||
@ -1384,7 +1348,7 @@ def test_list_archives_with_name_prefix():
|
||||
def test_list_archives_with_source_arn():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
source_arn_2 = client.create_event_bus(Name="test-bus")["EventBusArn"]
|
||||
client.create_archive(ArchiveName="test", EventSourceArn=source_arn)
|
||||
client.create_archive(ArchiveName="test-archive", EventSourceArn=source_arn_2)
|
||||
@ -1401,7 +1365,7 @@ def test_list_archives_with_source_arn():
|
||||
def test_list_archives_with_state():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
client.create_archive(ArchiveName="test", EventSourceArn=source_arn)
|
||||
client.create_archive(ArchiveName="test-archive", EventSourceArn=source_arn)
|
||||
|
||||
@ -1459,7 +1423,7 @@ def test_update_archive():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-archive"
|
||||
source_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(ACCOUNT_ID)
|
||||
source_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
event_pattern = json.dumps({"key": ["value"]})
|
||||
archive_arn = client.create_archive(ArchiveName=name, EventSourceArn=source_arn)[
|
||||
"ArchiveArn"
|
||||
@ -1499,9 +1463,7 @@ def test_update_archive_error_invalid_event_pattern():
|
||||
name = "test-archive"
|
||||
client.create_archive(
|
||||
ArchiveName=name,
|
||||
EventSourceArn="arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default",
|
||||
)
|
||||
|
||||
# when
|
||||
@ -1533,9 +1495,7 @@ def test_update_archive_error_unknown_archive():
|
||||
ex.operation_name.should.equal("UpdateArchive")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Archive {} does not exist.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Archive {name} does not exist.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -1545,9 +1505,7 @@ def test_delete_archive():
|
||||
name = "test-archive"
|
||||
client.create_archive(
|
||||
ArchiveName=name,
|
||||
EventSourceArn="arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default",
|
||||
)
|
||||
|
||||
# when
|
||||
@ -1573,9 +1531,7 @@ def test_delete_archive_error_unknown_archive():
|
||||
ex.operation_name.should.equal("DeleteArchive")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Archive {} does not exist.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Archive {name} does not exist.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -1585,9 +1541,7 @@ def test_archive_actual_events():
|
||||
name = "test-archive"
|
||||
name_2 = "test-archive-no-match"
|
||||
name_3 = "test-archive-matches"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
event = {
|
||||
"Source": "source",
|
||||
"DetailType": "type",
|
||||
@ -1629,9 +1583,7 @@ def test_archive_actual_events():
|
||||
def test_archive_event_with_bus_arn():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_name = "mock_archive"
|
||||
event_with_bus_arn = {
|
||||
"Source": "source",
|
||||
@ -1658,9 +1610,7 @@ def test_start_replay():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -1676,7 +1626,7 @@ def test_start_replay():
|
||||
|
||||
# then
|
||||
response["ReplayArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:replay/{1}".format(ACCOUNT_ID, name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:replay/{name}"
|
||||
)
|
||||
response["ReplayStartTime"].should.be.a(datetime)
|
||||
response["State"].should.equal("STARTING")
|
||||
@ -1692,15 +1642,11 @@ def test_start_replay_error_unknown_event_bus():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.start_replay(
|
||||
ReplayName="test",
|
||||
EventSourceArn="arn:aws:events:eu-central-1:{}:archive/test".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/test",
|
||||
EventStartTime=datetime(2021, 2, 1),
|
||||
EventEndTime=datetime(2021, 2, 2),
|
||||
Destination={
|
||||
"Arn": "arn:aws:events:eu-central-1:{0}:event-bus/{1}".format(
|
||||
ACCOUNT_ID, event_bus_name
|
||||
),
|
||||
"Arn": f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/{event_bus_name}",
|
||||
},
|
||||
)
|
||||
|
||||
@ -1710,7 +1656,7 @@ def test_start_replay_error_unknown_event_bus():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Event bus {} does not exist.".format(event_bus_name)
|
||||
f"Event bus {event_bus_name} does not exist."
|
||||
)
|
||||
|
||||
|
||||
@ -1723,9 +1669,7 @@ def test_start_replay_error_invalid_event_bus_arn():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.start_replay(
|
||||
ReplayName="test",
|
||||
EventSourceArn="arn:aws:events:eu-central-1:{}:archive/test".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/test",
|
||||
EventStartTime=datetime(2021, 2, 1),
|
||||
EventEndTime=datetime(2021, 2, 2),
|
||||
Destination={
|
||||
@ -1753,15 +1697,11 @@ def test_start_replay_error_unknown_archive():
with pytest.raises(ClientError) as e:
client.start_replay(
ReplayName="test",
EventSourceArn="arn:aws:events:eu-central-1:{0}:archive/{1}".format(
ACCOUNT_ID, archive_name
),
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/{archive_name}",
EventStartTime=datetime(2021, 2, 1),
EventEndTime=datetime(2021, 2, 2),
Destination={
"Arn": "arn:aws:events:eu-central-1:{}:event-bus/default".format(
ACCOUNT_ID
),
"Arn": f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default",
},
)

@ -1772,7 +1712,7 @@ def test_start_replay_error_unknown_archive():
ex.response["Error"]["Code"].should.contain("ValidationException")
ex.response["Error"]["Message"].should.equal(
"Parameter EventSourceArn is not valid. "
"Reason: Archive {} does not exist.".format(archive_name)
f"Reason: Archive {archive_name} does not exist."
)

@ -1782,9 +1722,7 @@ def test_start_replay_error_cross_event_bus():
client = boto3.client("events", "eu-central-1")
archive_arn = client.create_archive(
ArchiveName="test-archive",
EventSourceArn="arn:aws:events:eu-central-1:{}:event-bus/default".format(
ACCOUNT_ID
),
EventSourceArn=f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default",
)["ArchiveArn"]
event_bus_arn = client.create_event_bus(Name="test-bus")["EventBusArn"]
@ -1813,9 +1751,7 @@ def test_start_replay_error_cross_event_bus():
|
||||
def test_start_replay_error_invalid_end_time():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -1846,9 +1782,7 @@ def test_start_replay_error_duplicate():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -1875,9 +1809,7 @@ def test_start_replay_error_duplicate():
|
||||
ex.operation_name.should.equal("StartReplay")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceAlreadyExistsException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Replay {} already exists.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Replay {name} already exists.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -1885,9 +1817,7 @@ def test_describe_replay():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -1910,7 +1840,7 @@ def test_describe_replay():
|
||||
response["EventStartTime"].should.equal(datetime(2021, 2, 1, tzinfo=pytz.utc))
|
||||
response["EventEndTime"].should.equal(datetime(2021, 2, 2, tzinfo=pytz.utc))
|
||||
response["ReplayArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:replay/{1}".format(ACCOUNT_ID, name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:replay/{name}"
|
||||
)
|
||||
response["ReplayName"].should.equal(name)
|
||||
response["ReplayStartTime"].should.be.a(datetime)
|
||||
@ -1933,9 +1863,7 @@ def test_describe_replay_error_unknown_replay():
|
||||
ex.operation_name.should.equal("DescribeReplay")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Replay {} does not exist.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Replay {name} does not exist.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -1943,9 +1871,7 @@ def test_list_replays():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-replay", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -1977,9 +1903,7 @@ def test_list_replays():
|
||||
def test_list_replays_with_name_prefix():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-replay", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -2010,9 +1934,7 @@ def test_list_replays_with_name_prefix():
|
||||
def test_list_replays_with_source_arn():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-replay", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -2042,9 +1964,7 @@ def test_list_replays_with_source_arn():
|
||||
def test_list_replays_with_state():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-replay", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -2117,9 +2037,7 @@ def test_cancel_replay():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -2137,7 +2055,7 @@ def test_cancel_replay():
|
||||
|
||||
# then
|
||||
response["ReplayArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:replay/{1}".format(ACCOUNT_ID, name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:replay/{name}"
|
||||
)
|
||||
response["State"].should.equal("CANCELLING")
|
||||
|
||||
@ -2160,9 +2078,7 @@ def test_cancel_replay_error_unknown_replay():
|
||||
ex.operation_name.should.equal("CancelReplay")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("ResourceNotFoundException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Replay {} does not exist.".format(name)
|
||||
)
|
||||
ex.response["Error"]["Message"].should.equal(f"Replay {name} does not exist.")
|
||||
|
||||
|
||||
@mock_events
|
||||
@ -2170,9 +2086,7 @@ def test_cancel_replay_error_illegal_state():
|
||||
# given
|
||||
client = boto3.client("events", "eu-central-1")
|
||||
name = "test-replay"
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
archive_arn = client.create_archive(
|
||||
ArchiveName="test-archive", EventSourceArn=event_bus_arn
|
||||
)["ArchiveArn"]
|
||||
@ -2196,7 +2110,7 @@ def test_cancel_replay_error_illegal_state():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("IllegalStatusException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"Replay {} is not in a valid state for this operation.".format(name)
|
||||
f"Replay {name} is not in a valid state for this operation."
|
||||
)
|
||||
|
||||
|
||||
@ -2209,18 +2123,14 @@ def test_start_replay_send_to_log_group():
|
||||
log_group_name = "/test-group"
|
||||
rule_name = "test-rule"
|
||||
logs_client.create_log_group(logGroupName=log_group_name)
|
||||
event_bus_arn = "arn:aws:events:eu-central-1:{}:event-bus/default".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
event_bus_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:event-bus/default"
|
||||
client.put_rule(Name=rule_name, EventPattern=json.dumps({"account": [ACCOUNT_ID]}))
|
||||
client.put_targets(
|
||||
Rule=rule_name,
|
||||
Targets=[
|
||||
{
|
||||
"Id": "test",
|
||||
"Arn": "arn:aws:logs:eu-central-1:{0}:log-group:{1}".format(
|
||||
ACCOUNT_ID, log_group_name
|
||||
),
|
||||
"Arn": f"arn:aws:logs:eu-central-1:{ACCOUNT_ID}:log-group:{log_group_name}",
|
||||
}
|
||||
],
|
||||
)
|
||||
@ -2296,13 +2206,13 @@ def test_create_and_list_connections():
|
||||
)
|
||||
|
||||
response.get("ConnectionArn").should.contain(
|
||||
"arn:aws:events:eu-central-1:{0}:connection/test/".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:connection/test/"
|
||||
)
|
||||
|
||||
response = client.list_connections()
|
||||
|
||||
response.get("Connections")[0].get("ConnectionArn").should.contain(
|
||||
"arn:aws:events:eu-central-1:{0}:connection/test/".format(ACCOUNT_ID)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:connection/test/"
|
||||
)
@ -84,7 +84,7 @@ def test_create_archive():
|
||||
cfn_client.create_stack(StackName=stack_name, TemplateBody=template)
|
||||
|
||||
# then
|
||||
archive_arn = "arn:aws:events:eu-central-1:{0}:archive/{1}".format(ACCOUNT_ID, name)
|
||||
archive_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/{name}"
|
||||
stack = cfn_client.describe_stacks(StackName=stack_name)["Stacks"][0]
|
||||
stack["Outputs"][0]["OutputValue"].should.equal(archive_arn)
|
||||
|
||||
@ -119,7 +119,7 @@ def test_update_archive():
|
||||
response = events_client.describe_archive(ArchiveName=name)
|
||||
|
||||
response["ArchiveArn"].should.equal(
|
||||
"arn:aws:events:eu-central-1:{0}:archive/{1}".format(ACCOUNT_ID, name)
|
||||
f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:archive/{name}"
|
||||
)
|
||||
response["Description"].should.equal("test archive")
|
||||
|
||||
@ -156,7 +156,7 @@ def test_create_rule():
|
||||
cfn_client.create_stack(StackName=stack_name, TemplateBody=template)
|
||||
|
||||
# then
|
||||
rule_arn = "arn:aws:events:eu-central-1:{0}:rule/{1}".format(ACCOUNT_ID, name)
|
||||
rule_arn = f"arn:aws:events:eu-central-1:{ACCOUNT_ID}:rule/{name}"
|
||||
stack = cfn_client.describe_stacks(StackName=stack_name)["Stacks"][0]
|
||||
stack["Outputs"][0]["OutputValue"].should.equal(rule_arn)
|
||||
|
||||
|
@ -28,9 +28,7 @@ def test_send_to_cw_log_group():
|
||||
Targets=[
|
||||
{
|
||||
"Id": "logs",
|
||||
"Arn": "arn:aws:logs:eu-central-1:{0}:log-group:{1}".format(
|
||||
ACCOUNT_ID, log_group_name
|
||||
),
|
||||
"Arn": f"arn:aws:logs:eu-central-1:{ACCOUNT_ID}:log-group:{log_group_name}",
|
||||
}
|
||||
],
|
||||
)
|
||||
|
@ -16,7 +16,7 @@ def create_extended_s3_delivery_stream(client, stream_name):
|
||||
DeliveryStreamName=stream_name,
|
||||
DeliveryStreamType="DirectPut",
|
||||
ExtendedS3DestinationConfiguration={
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(ACCOUNT_ID),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"CompressionFormat": "UNCOMPRESSED",
|
||||
@ -28,9 +28,7 @@ def create_extended_s3_delivery_stream(client, stream_name):
|
||||
},
|
||||
"SchemaConfiguration": {
|
||||
"DatabaseName": stream_name,
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"TableName": "outputTable",
|
||||
},
|
||||
},
|
||||
@ -43,7 +41,7 @@ def create_redshift_delivery_stream(client, stream_name):
|
||||
return client.create_delivery_stream(
|
||||
DeliveryStreamName=stream_name,
|
||||
RedshiftDestinationConfiguration={
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(ACCOUNT_ID),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"ClusterJDBCURL": "jdbc:redshift://host.amazonaws.com:5439/database",
|
||||
"CopyCommand": {
|
||||
"DataTableName": "outputTable",
|
||||
@ -52,9 +50,7 @@ def create_redshift_delivery_stream(client, stream_name):
|
||||
"Username": "username",
|
||||
"Password": "password",
|
||||
"S3Configuration": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
@ -70,7 +66,7 @@ def create_elasticsearch_delivery_stream(client, stream_name):
|
||||
DeliveryStreamName=stream_name,
|
||||
DeliveryStreamType="DirectPut",
|
||||
ElasticsearchDestinationConfiguration={
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(ACCOUNT_ID),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"DomainARN": "arn:aws:es:::domain/firehose-test",
|
||||
"IndexName": "myIndex",
|
||||
"TypeName": "UNCOMPRESSED",
|
||||
@ -78,9 +74,7 @@ def create_elasticsearch_delivery_stream(client, stream_name):
|
||||
"BufferingHints": {"IntervalInSeconds": 123, "SizeInMBs": 123},
|
||||
"RetryOptions": {"DurationInSeconds": 123},
|
||||
"S3Configuration": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
@ -101,9 +95,7 @@ def create_http_delivery_stream(client, stream_name):
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
"CloudWatchLoggingOptions": {"Enabled": False},
|
||||
"S3Configuration": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
@ -140,9 +132,7 @@ def test_create_redshift_delivery_stream():
|
||||
{
|
||||
"DestinationId": "destinationId-000000000001",
|
||||
"RedshiftDestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"ClusterJDBCURL": "jdbc:redshift://host.amazonaws.com:5439/database",
|
||||
"CopyCommand": {
|
||||
"DataTableName": "outputTable",
|
||||
@ -150,9 +140,7 @@ def test_create_redshift_delivery_stream():
|
||||
},
|
||||
"Username": "username",
|
||||
"S3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {
|
||||
@ -196,9 +184,7 @@ def test_create_extended_s3_delivery_stream():
|
||||
{
|
||||
"DestinationId": "destinationId-000000000001",
|
||||
"ExtendedS3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"CompressionFormat": "UNCOMPRESSED",
|
||||
@ -214,17 +200,13 @@ def test_create_extended_s3_delivery_stream():
|
||||
},
|
||||
"SchemaConfiguration": {
|
||||
"DatabaseName": stream_name,
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"TableName": "outputTable",
|
||||
},
|
||||
},
|
||||
},
|
||||
"S3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"CompressionFormat": "UNCOMPRESSED",
|
||||
@ -263,9 +245,7 @@ def test_create_elasticsearch_delivery_stream():
|
||||
{
|
||||
"DestinationId": "destinationId-000000000001",
|
||||
"ElasticsearchDestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"DomainARN": "arn:aws:es:::domain/firehose-test",
|
||||
"IndexName": "myIndex",
|
||||
"TypeName": "UNCOMPRESSED",
|
||||
@ -273,9 +253,7 @@ def test_create_elasticsearch_delivery_stream():
|
||||
"BufferingHints": {"IntervalInSeconds": 123, "SizeInMBs": 123},
|
||||
"RetryOptions": {"DurationInSeconds": 123},
|
||||
"S3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {
|
||||
@ -301,7 +279,7 @@ def test_create_s3_delivery_stream():
|
||||
response = client.create_delivery_stream(
|
||||
DeliveryStreamName=stream_name,
|
||||
S3DestinationConfiguration={
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(ACCOUNT_ID),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
@ -328,9 +306,7 @@ def test_create_s3_delivery_stream():
|
||||
{
|
||||
"DestinationId": "destinationId-000000000001",
|
||||
"S3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
@ -375,9 +351,7 @@ def test_create_http_stream():
|
||||
"BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
|
||||
"CloudWatchLoggingOptions": {"Enabled": False},
|
||||
"S3DestinationDescription": {
|
||||
"RoleARN": "arn:aws:iam::{}:role/firehose_delivery_role".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
"RoleARN": f"arn:aws:iam::{ACCOUNT_ID}:role/firehose_delivery_role",
|
||||
"BucketARN": "arn:aws:s3:::firehose-test",
|
||||
"Prefix": "myFolder/",
|
||||
"BufferingHints": {
@ -15,9 +15,7 @@ from .fixtures.schema_registry import (
def create_database_input(database_name):
database_input = copy.deepcopy(DATABASE_INPUT)
database_input["Name"] = database_name
database_input["LocationUri"] = "s3://my-bucket/{database_name}".format(
database_name=database_name
)
database_input["LocationUri"] = f"s3://my-bucket/{database_name}"
return database_input


@ -42,9 +40,7 @@ def create_table_input(database_name, table_name, columns=None, partition_keys=N
table_input["StorageDescriptor"]["Columns"] = columns or []
table_input["StorageDescriptor"][
"Location"
] = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name
)
] = f"s3://my-bucket/{database_name}/{table_name}"
return table_input


@ -93,9 +89,7 @@ def create_column(name, type_, comment=None, parameters=None):


def create_partition_input(database_name, table_name, values=None, columns=None):
root_path = "s3://my-bucket/{database_name}/{table_name}".format(
database_name=database_name, table_name=table_name
)
root_path = f"s3://my-bucket/{database_name}/{table_name}"

part_input = copy.deepcopy(PARTITION_INPUT)
part_input["Values"] = values or []
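The three helper conversions above drop keyword arguments to str.format in favour of direct interpolation. A minimal sketch of the equivalence, with throwaway values standing in for the fixture data:

    # database_name and table_name are placeholders here, not the fixture values.
    database_name, table_name = "db", "tbl"
    old = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )
    new = f"s3://my-bucket/{database_name}/{table_name}"
    assert old == new == "s3://my-bucket/db/tbl"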
@ -552,7 +552,7 @@ def test_batch_create_partition():

partition_inputs = []
for i in range(0, 20):
values = ["2018-10-{:2}".format(i)]
values = [f"2018-10-{i:2}"]
part_input = helpers.create_partition_input(
database_name, table_name, values=values
)
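The format spec survives the conversion: "{:2}" in str.format and ":2" in the f-string both right-align the value in a field of width two, so single digits are padded with a space. A quick sanity check, independent of the test fixtures:

    for i in (3, 12):
        assert "2018-10-{:2}".format(i) == f"2018-10-{i:2}"
    # f"2018-10-{3:2}" renders as "2018-10- 3"; two-digit values fill the field exactly.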
@ -994,7 +994,7 @@ def test_batch_delete_partition():
|
||||
|
||||
partition_inputs = []
|
||||
for i in range(0, 20):
|
||||
values = ["2018-10-{:2}".format(i)]
|
||||
values = [f"2018-10-{i:2}"]
|
||||
part_input = helpers.create_partition_input(
|
||||
database_name, table_name, values=values
|
||||
)
|
||||
@ -1027,7 +1027,7 @@ def test_batch_delete_partition_with_bad_partitions():
|
||||
|
||||
partition_inputs = []
|
||||
for i in range(0, 20):
|
||||
values = ["2018-10-{:2}".format(i)]
|
||||
values = [f"2018-10-{i:2}"]
|
||||
part_input = helpers.create_partition_input(
|
||||
database_name, table_name, values=values
|
||||
)
|
||||
|
@ -514,7 +514,7 @@ def test_create_policy():
|
||||
PolicyName="TestCreatePolicy", PolicyDocument=MOCK_POLICY
|
||||
)
|
||||
response["Policy"]["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:policy/TestCreatePolicy".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicy"
|
||||
)
|
||||
|
||||
|
||||
@ -547,14 +547,12 @@ def test_create_policy_versions():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
with pytest.raises(ClientError):
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreatePolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}',
|
||||
)
|
||||
conn.create_policy(PolicyName="TestCreatePolicyVersion", PolicyDocument=MOCK_POLICY)
|
||||
version = conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreatePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
SetAsDefault=True,
|
||||
)
|
||||
@ -562,11 +560,11 @@ def test_create_policy_versions():
|
||||
version.get("PolicyVersion").get("VersionId").should.equal("v2")
|
||||
version.get("PolicyVersion").get("IsDefaultVersion").should.be.ok
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreatePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyVersion",
|
||||
VersionId="v1",
|
||||
)
|
||||
version = conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreatePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
)
|
||||
version.get("PolicyVersion").get("VersionId").should.equal("v3")
|
||||
@ -581,16 +579,12 @@ def test_create_many_policy_versions():
|
||||
)
|
||||
for _ in range(0, 4):
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreateManyPolicyVersions".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreateManyPolicyVersions",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
)
|
||||
with pytest.raises(ClientError):
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestCreateManyPolicyVersions".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreateManyPolicyVersions",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
)
|
||||
|
||||
@ -602,23 +596,17 @@ def test_set_default_policy_version():
|
||||
PolicyName="TestSetDefaultPolicyVersion", PolicyDocument=MOCK_POLICY
|
||||
)
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY_2,
|
||||
SetAsDefault=True,
|
||||
)
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY_3,
|
||||
SetAsDefault=True,
|
||||
)
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion"
|
||||
)
|
||||
versions.get("Versions")[0].get("Document").should.equal(json.loads(MOCK_POLICY))
|
||||
versions.get("Versions")[0].get("IsDefaultVersion").shouldnt.be.ok
|
||||
@ -628,15 +616,11 @@ def test_set_default_policy_version():
|
||||
versions.get("Versions")[2].get("IsDefaultVersion").should.be.ok
|
||||
|
||||
conn.set_default_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion",
|
||||
VersionId="v1",
|
||||
)
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion"
|
||||
)
|
||||
versions.get("Versions")[0].get("Document").should.equal(json.loads(MOCK_POLICY))
|
||||
versions.get("Versions")[0].get("IsDefaultVersion").should.be.ok
|
||||
@ -647,20 +631,16 @@ def test_set_default_policy_version():
|
||||
|
||||
# Set default version for non-existing policy
|
||||
conn.set_default_policy_version.when.called_with(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestNonExistingPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestNonExistingPolicy",
|
||||
VersionId="v1",
|
||||
).should.throw(
|
||||
ClientError,
|
||||
"Policy arn:aws:iam::{}:policy/TestNonExistingPolicy not found".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
f"Policy arn:aws:iam::{ACCOUNT_ID}:policy/TestNonExistingPolicy not found",
|
||||
)
|
||||
|
||||
# Set default version for incorrect version
|
||||
conn.set_default_policy_version.when.called_with(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion",
|
||||
VersionId="wrong_version_id",
|
||||
).should.throw(
|
||||
ClientError,
|
||||
@ -669,15 +649,11 @@ def test_set_default_policy_version():
|
||||
|
||||
# Set default version for non-existing version
|
||||
conn.set_default_policy_version.when.called_with(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion",
|
||||
VersionId="v4",
|
||||
).should.throw(
|
||||
ClientError,
|
||||
"Policy arn:aws:iam::{}:policy/TestSetDefaultPolicyVersion version v4 does not exist or is not attachable.".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
f"Policy arn:aws:iam::{ACCOUNT_ID}:policy/TestSetDefaultPolicyVersion version v4 does not exist or is not attachable.",
|
||||
)
|
||||
|
||||
|
||||
@ -686,10 +662,10 @@ def test_get_policy():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestGetPolicy", PolicyDocument=MOCK_POLICY)
|
||||
policy = conn.get_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestGetPolicy".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestGetPolicy"
|
||||
)
|
||||
policy["Policy"]["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:policy/TestGetPolicy".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:policy/TestGetPolicy"
|
||||
)
|
||||
|
||||
|
||||
@ -712,16 +688,16 @@ def test_get_policy_version():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestGetPolicyVersion", PolicyDocument=MOCK_POLICY)
|
||||
version = conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestGetPolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestGetPolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
)
|
||||
with pytest.raises(ClientError):
|
||||
conn.get_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestGetPolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestGetPolicyVersion",
|
||||
VersionId="v2-does-not-exist",
|
||||
)
|
||||
retrieved = conn.get_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestGetPolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestGetPolicyVersion",
|
||||
VersionId=version.get("PolicyVersion").get("VersionId"),
|
||||
)
|
||||
retrieved.get("PolicyVersion").get("Document").should.equal(json.loads(MOCK_POLICY))
|
||||
@ -766,25 +742,25 @@ def test_list_policy_versions():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
with pytest.raises(ClientError):
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestListPolicyVersions".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestListPolicyVersions"
|
||||
)
|
||||
conn.create_policy(PolicyName="TestListPolicyVersions", PolicyDocument=MOCK_POLICY)
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestListPolicyVersions".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestListPolicyVersions"
|
||||
)
|
||||
versions.get("Versions")[0].get("VersionId").should.equal("v1")
|
||||
versions.get("Versions")[0].get("IsDefaultVersion").should.be.ok
|
||||
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestListPolicyVersions".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestListPolicyVersions",
|
||||
PolicyDocument=MOCK_POLICY_2,
|
||||
)
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestListPolicyVersions".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestListPolicyVersions",
|
||||
PolicyDocument=MOCK_POLICY_3,
|
||||
)
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestListPolicyVersions".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestListPolicyVersions"
|
||||
)
|
||||
versions.get("Versions")[1].get("Document").should.equal(json.loads(MOCK_POLICY_2))
|
||||
versions.get("Versions")[1].get("IsDefaultVersion").shouldnt.be.ok
|
||||
@ -797,22 +773,20 @@ def test_delete_policy_version():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestDeletePolicyVersion", PolicyDocument=MOCK_POLICY)
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY,
|
||||
)
|
||||
with pytest.raises(ClientError):
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion",
|
||||
VersionId="v2-nope-this-does-not-exist",
|
||||
)
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion",
|
||||
VersionId="v2",
|
||||
)
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion"
|
||||
)
|
||||
len(versions.get("Versions")).should.equal(1)
|
||||
|
||||
@ -822,14 +796,12 @@ def test_delete_default_policy_version():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestDeletePolicyVersion", PolicyDocument=MOCK_POLICY)
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion",
|
||||
PolicyDocument=MOCK_POLICY_2,
|
||||
)
|
||||
with pytest.raises(ClientError):
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::{}:policy/TestDeletePolicyVersion".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestDeletePolicyVersion",
|
||||
VersionId="v1",
|
||||
)
|
||||
|
||||
@ -849,9 +821,7 @@ def test_create_policy_with_tags():
|
||||
|
||||
# Get policy:
|
||||
policy = conn.get_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(
|
||||
ACCOUNT_ID, "TestCreatePolicyWithTags1"
|
||||
)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyWithTags1"
|
||||
)["Policy"]
|
||||
assert len(policy["Tags"]) == 2
|
||||
assert policy["Tags"][0]["Key"] == "somekey"
|
||||
@ -872,9 +842,7 @@ def test_create_policy_with_empty_tag_value():
|
||||
Tags=[{"Key": "somekey", "Value": ""}],
|
||||
)
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(
|
||||
ACCOUNT_ID, "TestCreatePolicyWithTags2"
|
||||
)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestCreatePolicyWithTags2"
|
||||
)
|
||||
assert len(tags["Tags"]) == 1
|
||||
assert tags["Tags"][0]["Key"] == "somekey"
|
||||
@ -994,7 +962,7 @@ def test_create_policy_with_no_tags():
|
||||
|
||||
# Get without tags:
|
||||
policy = conn.get_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy"
|
||||
)["Policy"]
|
||||
assert not policy.get("Tags")
|
||||
|
||||
@ -1004,7 +972,7 @@ def test_get_policy_with_tags():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1013,7 +981,7 @@ def test_get_policy_with_tags():
|
||||
|
||||
# Get policy:
|
||||
policy = conn.get_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy"
|
||||
)["Policy"]
|
||||
assert len(policy["Tags"]) == 2
|
||||
assert policy["Tags"][0]["Key"] == "somekey"
|
||||
@ -1027,7 +995,7 @@ def test_list_policy_tags():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1036,7 +1004,7 @@ def test_list_policy_tags():
|
||||
|
||||
# List_policy_tags:
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy"
|
||||
)
|
||||
assert len(tags["Tags"]) == 2
|
||||
assert tags["Tags"][0]["Key"] == "somekey"
|
||||
@ -1052,7 +1020,7 @@ def test_list_policy_tags_pagination():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1061,7 +1029,7 @@ def test_list_policy_tags_pagination():
|
||||
|
||||
# Test pagination:
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
MaxItems=1,
|
||||
)
|
||||
assert len(tags["Tags"]) == 1
|
||||
@ -1071,7 +1039,7 @@ def test_list_policy_tags_pagination():
|
||||
assert tags["Marker"] == "1"
|
||||
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Marker=tags["Marker"],
|
||||
)
|
||||
assert len(tags["Tags"]) == 1
|
||||
@ -1086,7 +1054,7 @@ def test_updating_existing_tag():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1095,11 +1063,11 @@ def test_updating_existing_tag():
|
||||
|
||||
# Test updating an existing tag:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "somekey", "Value": "somenewvalue"}],
|
||||
)
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy"
|
||||
)
|
||||
assert len(tags["Tags"]) == 2
|
||||
assert tags["Tags"][0]["Key"] == "somekey"
|
||||
@ -1111,7 +1079,7 @@ def test_updating_existing_tag_with_empty_value():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1120,11 +1088,11 @@ def test_updating_existing_tag_with_empty_value():
|
||||
|
||||
# Empty is good:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "somekey", "Value": ""}],
|
||||
)
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy"
|
||||
)
|
||||
assert len(tags["Tags"]) == 2
|
||||
assert tags["Tags"][0]["Key"] == "somekey"
|
||||
@ -1136,7 +1104,7 @@ def test_updating_existing_tagged_policy_with_too_many_tags():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1149,7 +1117,7 @@ def test_updating_existing_tagged_policy_with_too_many_tags():
|
||||
map(lambda x: {"Key": str(x), "Value": str(x)}, range(0, 51))
|
||||
)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=too_many_tags,
|
||||
)
|
||||
assert (
|
||||
@ -1163,7 +1131,7 @@ def test_updating_existing_tagged_policy_with_duplicate_tag():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1173,7 +1141,7 @@ def test_updating_existing_tagged_policy_with_duplicate_tag():
|
||||
# With a duplicate tag:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "0", "Value": ""}, {"Key": "0", "Value": ""}],
|
||||
)
|
||||
assert (
|
||||
@ -1187,7 +1155,7 @@ def test_updating_existing_tagged_policy_with_duplicate_tag_different_casing():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1197,7 +1165,7 @@ def test_updating_existing_tagged_policy_with_duplicate_tag_different_casing():
|
||||
# Duplicate tag with different casing:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "a", "Value": ""}, {"Key": "A", "Value": ""}],
|
||||
)
|
||||
assert (
|
||||
@ -1211,7 +1179,7 @@ def test_updating_existing_tagged_policy_with_large_key():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1221,7 +1189,7 @@ def test_updating_existing_tagged_policy_with_large_key():
|
||||
# With a really big key:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "0" * 129, "Value": ""}],
|
||||
)
|
||||
assert (
|
||||
@ -1235,7 +1203,7 @@ def test_updating_existing_tagged_policy_with_large_value():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1245,7 +1213,7 @@ def test_updating_existing_tagged_policy_with_large_value():
|
||||
# With a really big value:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "0", "Value": "0" * 257}],
|
||||
)
|
||||
assert (
|
||||
@ -1259,7 +1227,7 @@ def test_updating_existing_tagged_policy_with_invalid_character():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
conn.create_policy(PolicyName="TestTagPolicy", PolicyDocument=MOCK_POLICY)
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1269,7 +1237,7 @@ def test_updating_existing_tagged_policy_with_invalid_character():
|
||||
# With an invalid character:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestTagPolicy",
|
||||
Tags=[{"Key": "NOWAY!", "Value": ""}],
|
||||
)
|
||||
assert (
|
||||
@ -1285,7 +1253,7 @@ def test_tag_non_existant_policy():
|
||||
# With a policy that doesn't exist:
|
||||
with pytest.raises(ClientError):
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "NotAPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/NotAPolicy",
|
||||
Tags=[{"Key": "some", "Value": "value"}],
|
||||
)
|
||||
|
||||
@ -1297,7 +1265,7 @@ def test_untag_policy():
|
||||
|
||||
# With proper tag values:
|
||||
conn.tag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
Tags=[
|
||||
{"Key": "somekey", "Value": "somevalue"},
|
||||
{"Key": "someotherkey", "Value": "someothervalue"},
|
||||
@ -1306,11 +1274,11 @@ def test_untag_policy():
|
||||
|
||||
# Remove them:
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
TagKeys=["somekey"],
|
||||
)
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy"
|
||||
)
|
||||
assert len(tags["Tags"]) == 1
|
||||
assert tags["Tags"][0]["Key"] == "someotherkey"
|
||||
@ -1318,11 +1286,11 @@ def test_untag_policy():
|
||||
|
||||
# And again:
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
TagKeys=["someotherkey"],
|
||||
)
|
||||
tags = conn.list_policy_tags(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy")
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy"
|
||||
)
|
||||
assert not tags["Tags"]
|
||||
|
||||
@ -1330,7 +1298,7 @@ def test_untag_policy():
|
||||
# With more than 50 tags:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
TagKeys=[str(x) for x in range(0, 51)],
|
||||
)
|
||||
assert (
|
||||
@ -1342,7 +1310,7 @@ def test_untag_policy():
|
||||
# With a really big key:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
TagKeys=["0" * 129],
|
||||
)
|
||||
assert (
|
||||
@ -1354,7 +1322,7 @@ def test_untag_policy():
|
||||
# With an invalid character:
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "TestUnTagPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/TestUnTagPolicy",
|
||||
TagKeys=["NOWAY!"],
|
||||
)
|
||||
assert (
|
||||
@ -1366,7 +1334,7 @@ def test_untag_policy():
|
||||
# With a policy that doesn't exist:
|
||||
with pytest.raises(ClientError):
|
||||
conn.untag_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, "NotAPolicy"),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/NotAPolicy",
|
||||
TagKeys=["somevalue"],
|
||||
)
|
||||
|
||||
@ -1378,7 +1346,7 @@ def test_create_user_boto():
|
||||
u["Path"].should.equal("/")
|
||||
u["UserName"].should.equal("my-user")
|
||||
u.should.have.key("UserId")
|
||||
u["Arn"].should.equal("arn:aws:iam::{}:user/my-user".format(ACCOUNT_ID))
|
||||
u["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:user/my-user")
|
||||
u["CreateDate"].should.be.a(datetime)
|
||||
|
||||
with pytest.raises(ClientError) as ex:
|
||||
@ -1403,7 +1371,7 @@ def test_get_user():
|
||||
u["Path"].should.equal("/")
|
||||
u["UserName"].should.equal("my-user")
|
||||
u.should.have.key("UserId")
|
||||
u["Arn"].should.equal("arn:aws:iam::{}:user/my-user".format(ACCOUNT_ID))
|
||||
u["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:user/my-user")
|
||||
u["CreateDate"].should.be.a(datetime)
|
||||
|
||||
|
||||
@ -1438,7 +1406,7 @@ def test_list_users():
|
||||
user = response["Users"][0]
|
||||
user["UserName"].should.equal("my-user")
|
||||
user["Path"].should.equal("/")
|
||||
user["Arn"].should.equal("arn:aws:iam::{}:user/my-user".format(ACCOUNT_ID))
|
||||
user["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:user/my-user")
|
||||
response["IsTruncated"].should.equal(False)
|
||||
|
||||
conn.create_user(UserName="my-user-1", Path="myUser")
|
||||
@ -1620,9 +1588,7 @@ def test_create_virtual_mfa_device():
|
||||
response = client.create_virtual_mfa_device(VirtualMFADeviceName="test-device")
|
||||
device = response["VirtualMFADevice"]
|
||||
|
||||
device["SerialNumber"].should.equal(
|
||||
"arn:aws:iam::{}:mfa/test-device".format(ACCOUNT_ID)
|
||||
)
|
||||
device["SerialNumber"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:mfa/test-device")
|
||||
device["Base32StringSeed"].decode("ascii").should.match("[A-Z234567]")
|
||||
device["QRCodePNG"].should_not.equal("")
|
||||
|
||||
@ -1631,9 +1597,7 @@ def test_create_virtual_mfa_device():
|
||||
)
|
||||
device = response["VirtualMFADevice"]
|
||||
|
||||
device["SerialNumber"].should.equal(
|
||||
"arn:aws:iam::{}:mfa/test-device-2".format(ACCOUNT_ID)
|
||||
)
|
||||
device["SerialNumber"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:mfa/test-device-2")
|
||||
device["Base32StringSeed"].decode("ascii").should.match("[A-Z234567]")
|
||||
device["QRCodePNG"].should_not.equal("")
|
||||
|
||||
@ -1643,7 +1607,7 @@ def test_create_virtual_mfa_device():
|
||||
device = response["VirtualMFADevice"]
|
||||
|
||||
device["SerialNumber"].should.equal(
|
||||
"arn:aws:iam::{}:mfa/test/test-device".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:mfa/test/test-device"
|
||||
)
|
||||
device["Base32StringSeed"].decode("ascii").should.match("[A-Z234567]")
|
||||
device["QRCodePNG"].should_not.equal("")
|
||||
@ -1677,7 +1641,7 @@ def test_create_virtual_mfa_device_errors():
"It must begin and end with / and contain only alphanumeric characters and/or / characters.",
)

too_long_path = "/{}/".format("b" * 511)
too_long_path = f"/{('b' * 511)}/"
client.create_virtual_mfa_device.when.called_with(
Path=too_long_path, VirtualMFADeviceName="test-device"
).should.throw(
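Replacement fields in f-strings accept arbitrary expressions, so the repeated-character path keeps its meaning; the parentheses around 'b' * 511 are only for readability. A small check of that equivalence:

    assert "/{}/".format("b" * 511) == f"/{('b' * 511)}/"
    assert len(f"/{('b' * 511)}/") == 513  # 511 b's plus the two slashes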
@ -1706,12 +1670,12 @@ def test_delete_virtual_mfa_device():
|
||||
def test_delete_virtual_mfa_device_errors():
|
||||
client = boto3.client("iam", region_name="us-east-1")
|
||||
|
||||
serial_number = "arn:aws:iam::{}:mfa/not-existing".format(ACCOUNT_ID)
|
||||
serial_number = f"arn:aws:iam::{ACCOUNT_ID}:mfa/not-existing"
|
||||
client.delete_virtual_mfa_device.when.called_with(
|
||||
SerialNumber=serial_number
|
||||
).should.throw(
|
||||
ClientError,
|
||||
"VirtualMFADevice with serial number {0} doesn't exist.".format(serial_number),
|
||||
f"VirtualMFADevice with serial number {serial_number} doesn't exist.",
|
||||
)
|
||||
|
||||
|
||||
@ -1796,9 +1760,7 @@ def test_enable_virtual_mfa_device():
|
||||
device["User"]["Path"].should.equal("/")
|
||||
device["User"]["UserName"].should.equal("test-user")
|
||||
device["User"]["UserId"].should.match("[a-z0-9]+")
|
||||
device["User"]["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:user/test-user".format(ACCOUNT_ID)
|
||||
)
|
||||
device["User"]["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:user/test-user")
|
||||
device["User"]["CreateDate"].should.be.a(datetime)
|
||||
device["User"]["Tags"].should.equal(tags)
|
||||
device["EnableDate"].should.be.a(datetime)
|
||||
@ -2324,7 +2286,7 @@ def test_get_account_authorization_details():
|
||||
)
|
||||
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
boundary = "arn:aws:iam::{}:policy/boundary".format(ACCOUNT_ID)
|
||||
boundary = f"arn:aws:iam::{ACCOUNT_ID}:policy/boundary"
|
||||
conn.create_role(
|
||||
RoleName="my-role",
|
||||
AssumeRolePolicyDocument="some policy",
|
||||
@ -2351,11 +2313,11 @@ def test_get_account_authorization_details():
|
||||
|
||||
conn.attach_user_policy(
|
||||
UserName="testUser",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
conn.attach_group_policy(
|
||||
GroupName="testGroup",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
|
||||
conn.add_user_to_group(UserName="testUser", GroupName="testGroup")
|
||||
@ -2375,7 +2337,7 @@ def test_get_account_authorization_details():
|
||||
)
|
||||
conn.attach_role_policy(
|
||||
RoleName="my-role",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
|
||||
result = conn.get_account_authorization_details(Filter=["Role"])
|
||||
@ -2392,7 +2354,7 @@ def test_get_account_authorization_details():
|
||||
"PermissionsBoundary"
|
||||
] == {
|
||||
"PermissionsBoundaryType": "PermissionsBoundaryPolicy",
|
||||
"PermissionsBoundaryArn": "arn:aws:iam::{}:policy/boundary".format(ACCOUNT_ID),
|
||||
"PermissionsBoundaryArn": f"arn:aws:iam::{ACCOUNT_ID}:policy/boundary",
|
||||
}
|
||||
assert len(result["RoleDetailList"][0]["Tags"]) == 2
|
||||
assert len(result["RoleDetailList"][0]["RolePolicyList"]) == 1
|
||||
@ -2401,9 +2363,10 @@ def test_get_account_authorization_details():
|
||||
result["RoleDetailList"][0]["AttachedManagedPolicies"][0]["PolicyName"]
|
||||
== "testPolicy"
|
||||
)
|
||||
assert result["RoleDetailList"][0]["AttachedManagedPolicies"][0][
|
||||
"PolicyArn"
|
||||
] == "arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID)
|
||||
assert (
|
||||
result["RoleDetailList"][0]["AttachedManagedPolicies"][0]["PolicyArn"]
|
||||
== f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy"
|
||||
)
|
||||
assert result["RoleDetailList"][0]["RolePolicyList"][0][
|
||||
"PolicyDocument"
|
||||
] == json.loads(test_policy)
|
||||
@ -2420,9 +2383,10 @@ def test_get_account_authorization_details():
|
||||
result["UserDetailList"][0]["AttachedManagedPolicies"][0]["PolicyName"]
|
||||
== "testPolicy"
|
||||
)
|
||||
assert result["UserDetailList"][0]["AttachedManagedPolicies"][0][
|
||||
"PolicyArn"
|
||||
] == "arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID)
|
||||
assert (
|
||||
result["UserDetailList"][0]["AttachedManagedPolicies"][0]["PolicyArn"]
|
||||
== f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy"
|
||||
)
|
||||
assert result["UserDetailList"][0]["UserPolicyList"][0][
|
||||
"PolicyDocument"
|
||||
] == json.loads(test_policy)
|
||||
@ -2438,9 +2402,10 @@ def test_get_account_authorization_details():
|
||||
result["GroupDetailList"][0]["AttachedManagedPolicies"][0]["PolicyName"]
|
||||
== "testPolicy"
|
||||
)
|
||||
assert result["GroupDetailList"][0]["AttachedManagedPolicies"][0][
|
||||
"PolicyArn"
|
||||
] == "arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID)
|
||||
assert (
|
||||
result["GroupDetailList"][0]["AttachedManagedPolicies"][0]["PolicyArn"]
|
||||
== f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy"
|
||||
)
|
||||
assert result["GroupDetailList"][0]["GroupPolicyList"][0][
|
||||
"PolicyDocument"
|
||||
] == json.loads(test_policy)
|
||||
@ -2508,14 +2473,16 @@ def test_signing_certs():
|
||||
UserName="notauser", CertificateId=cert_id, Status="Inactive"
|
||||
)
|
||||
|
||||
fake_id_name = "x" * 32
|
||||
with pytest.raises(ClientError) as ce:
|
||||
client.update_signing_certificate(
|
||||
UserName="testing", CertificateId="x" * 32, Status="Inactive"
|
||||
UserName="testing", CertificateId=fake_id_name, Status="Inactive"
|
||||
)
|
||||
|
||||
assert ce.value.response["Error"][
|
||||
"Message"
|
||||
] == "The Certificate with id {id} cannot be found.".format(id="x" * 32)
|
||||
assert (
|
||||
ce.value.response["Error"]["Message"]
|
||||
== f"The Certificate with id {fake_id_name} cannot be found."
|
||||
)
|
||||
|
||||
# List the certs:
|
||||
resp = client.list_signing_certificates(UserName="testing")["Certificates"]
|
||||
@ -2540,7 +2507,7 @@ def test_create_saml_provider():
|
||||
Name="TestSAMLProvider", SAMLMetadataDocument="a" * 1024
|
||||
)
|
||||
response["SAMLProviderArn"].should.equal(
|
||||
"arn:aws:iam::{}:saml-provider/TestSAMLProvider".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:saml-provider/TestSAMLProvider"
|
||||
)
|
||||
|
||||
|
||||
@ -2562,7 +2529,7 @@ def test_list_saml_providers():
|
||||
conn.create_saml_provider(Name="TestSAMLProvider", SAMLMetadataDocument="a" * 1024)
|
||||
response = conn.list_saml_providers()
|
||||
response["SAMLProviderList"][0]["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:saml-provider/TestSAMLProvider".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:saml-provider/TestSAMLProvider"
|
||||
)
|
||||
|
||||
|
||||
@ -2583,9 +2550,10 @@ def test_delete_saml_provider():
|
||||
with pytest.raises(ClientError) as ce:
|
||||
conn.delete_signing_certificate(UserName="testing", CertificateId=cert_id)
|
||||
|
||||
assert ce.value.response["Error"][
|
||||
"Message"
|
||||
] == "The Certificate with id {id} cannot be found.".format(id=cert_id)
|
||||
assert (
|
||||
ce.value.response["Error"]["Message"]
|
||||
== f"The Certificate with id {cert_id} cannot be found."
|
||||
)
|
||||
|
||||
# Verify that it's not in the list:
|
||||
resp = conn.list_signing_certificates(UserName="testing")
|
||||
@ -2989,11 +2957,11 @@ def test_list_entities_for_policy():
|
||||
|
||||
conn.attach_user_policy(
|
||||
UserName="testUser",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
conn.attach_group_policy(
|
||||
GroupName="testGroup",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
|
||||
conn.add_user_to_group(UserName="testUser", GroupName="testGroup")
|
||||
@ -3013,11 +2981,11 @@ def test_list_entities_for_policy():
|
||||
)
|
||||
conn.attach_role_policy(
|
||||
RoleName="my-role",
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
)
|
||||
|
||||
response = conn.list_entities_for_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
EntityFilter="Role",
|
||||
)
|
||||
assert response["PolicyRoles"][0]["RoleName"] == "my-role"
|
||||
@ -3026,7 +2994,7 @@ def test_list_entities_for_policy():
|
||||
response["PolicyUsers"].should.equal([])
|
||||
|
||||
response = conn.list_entities_for_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
EntityFilter="User",
|
||||
)
|
||||
assert response["PolicyUsers"][0]["UserName"] == "testUser"
|
||||
@ -3035,7 +3003,7 @@ def test_list_entities_for_policy():
|
||||
response["PolicyRoles"].should.equal([])
|
||||
|
||||
response = conn.list_entities_for_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
EntityFilter="Group",
|
||||
)
|
||||
assert response["PolicyGroups"][0]["GroupName"] == "testGroup"
|
||||
@ -3044,7 +3012,7 @@ def test_list_entities_for_policy():
|
||||
response["PolicyUsers"].should.equal([])
|
||||
|
||||
response = conn.list_entities_for_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID),
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy",
|
||||
EntityFilter="LocalManagedPolicy",
|
||||
)
|
||||
assert response["PolicyGroups"][0]["GroupName"] == "testGroup"
|
||||
@ -3057,7 +3025,7 @@ def test_list_entities_for_policy():
|
||||
|
||||
# Return everything when no entity is specified
|
||||
response = conn.list_entities_for_policy(
|
||||
PolicyArn="arn:aws:iam::{}:policy/testPolicy".format(ACCOUNT_ID)
|
||||
PolicyArn=f"arn:aws:iam::{ACCOUNT_ID}:policy/testPolicy"
|
||||
)
|
||||
response["PolicyGroups"][0]["GroupName"].should.equal("testGroup")
|
||||
response["PolicyUsers"][0]["UserName"].should.equal("testUser")
|
||||
@ -3074,9 +3042,7 @@ def test_create_role_no_path():
|
||||
resp = conn.create_role(
|
||||
RoleName="my-role", AssumeRolePolicyDocument="some policy", Description="test"
|
||||
)
|
||||
resp.get("Role").get("Arn").should.equal(
|
||||
"arn:aws:iam::{}:role/my-role".format(ACCOUNT_ID)
|
||||
)
|
||||
resp.get("Role").get("Arn").should.equal(f"arn:aws:iam::{ACCOUNT_ID}:role/my-role")
|
||||
resp.get("Role").should_not.have.key("PermissionsBoundary")
|
||||
resp.get("Role").get("Description").should.equal("test")
|
||||
|
||||
@ -3084,7 +3050,7 @@ def test_create_role_no_path():
|
||||
@mock_iam()
|
||||
def test_create_role_with_permissions_boundary():
|
||||
conn = boto3.client("iam", region_name="us-east-1")
|
||||
boundary = "arn:aws:iam::{}:policy/boundary".format(ACCOUNT_ID)
|
||||
boundary = f"arn:aws:iam::{ACCOUNT_ID}:policy/boundary"
|
||||
resp = conn.create_role(
|
||||
RoleName="my-role",
|
||||
AssumeRolePolicyDocument="some policy",
|
||||
@ -3139,7 +3105,7 @@ def test_create_role_with_same_name_should_fail():
|
||||
)
|
||||
err.value.response["Error"]["Code"].should.equal("EntityAlreadyExists")
|
||||
err.value.response["Error"]["Message"].should.equal(
|
||||
"Role with name {0} already exists.".format(test_role_name)
|
||||
f"Role with name {test_role_name} already exists."
|
||||
)
|
||||
|
||||
|
||||
@ -3153,9 +3119,7 @@ def test_create_policy_with_same_name_should_fail():
|
||||
iam.create_policy(PolicyName=test_policy_name, PolicyDocument=MOCK_POLICY)
|
||||
err.value.response["Error"]["Code"].should.equal("EntityAlreadyExists")
|
||||
err.value.response["Error"]["Message"].should.equal(
|
||||
"A policy called {0} already exists. Duplicate names are not allowed.".format(
|
||||
test_policy_name
|
||||
)
|
||||
f"A policy called {test_policy_name} already exists. Duplicate names are not allowed."
|
||||
)
|
||||
|
||||
|
||||
@ -3237,7 +3201,7 @@ def test_get_account_password_policy_errors():
|
||||
|
||||
client.get_account_password_policy.when.called_with().should.throw(
|
||||
ClientError,
|
||||
"The Password Policy with domain name {} cannot be found.".format(ACCOUNT_ID),
|
||||
f"The Password Policy with domain name {ACCOUNT_ID} cannot be found.",
|
||||
)
|
||||
|
||||
|
||||
@ -3254,7 +3218,7 @@ def test_delete_account_password_policy():
|
||||
|
||||
client.get_account_password_policy.when.called_with().should.throw(
|
||||
ClientError,
|
||||
"The Password Policy with domain name {} cannot be found.".format(ACCOUNT_ID),
|
||||
f"The Password Policy with domain name {ACCOUNT_ID} cannot be found.",
|
||||
)
|
||||
|
||||
|
||||
@ -3467,11 +3431,11 @@ def test_role_list_config_discovered_resources():
|
||||
this_role = role_config_query.backends[DEFAULT_ACCOUNT_ID][
|
||||
"global"
|
||||
].create_role(
|
||||
role_name="role{}".format(ix),
|
||||
role_name=f"role{ix}",
|
||||
assume_role_policy_document=None,
|
||||
path="/",
|
||||
permissions_boundary=None,
|
||||
description="role{}".format(ix),
|
||||
description=f"role{ix}",
|
||||
tags=[{"Key": "foo", "Value": "bar"}],
|
||||
max_session_duration=3600,
|
||||
)
|
||||
@ -3848,9 +3812,9 @@ def test_role_config_client():
|
||||
num_roles = 10
|
||||
for ix in range(1, num_roles + 1):
|
||||
this_policy = iam_client.create_role(
|
||||
RoleName="role{}".format(ix),
|
||||
RoleName=f"role{ix}",
|
||||
Path="/",
|
||||
Description="role{}".format(ix),
|
||||
Description=f"role{ix}",
|
||||
AssumeRolePolicyDocument=json.dumps("{ }"),
|
||||
)
|
||||
roles.append(
|
||||
@ -4075,10 +4039,10 @@ def test_policy_list_config_discovered_resources():
|
||||
this_policy = policy_config_query.backends[DEFAULT_ACCOUNT_ID][
|
||||
"global"
|
||||
].create_policy(
|
||||
description="policy{}".format(ix),
|
||||
description=f"policy{ix}",
|
||||
path="",
|
||||
policy_document=json.dumps(basic_policy),
|
||||
policy_name="policy{}".format(ix),
|
||||
policy_name=f"policy{ix}",
|
||||
tags=[],
|
||||
)
|
||||
policies.append({"id": this_policy.id, "name": this_policy.name})
|
||||
@ -4301,10 +4265,10 @@ def test_policy_config_client():
|
||||
num_policies = 10
|
||||
for ix in range(1, num_policies + 1):
|
||||
this_policy = iam_client.create_policy(
|
||||
PolicyName="policy{}".format(ix),
|
||||
PolicyName=f"policy{ix}",
|
||||
Path="/",
|
||||
PolicyDocument=json.dumps(basic_policy),
|
||||
Description="policy{}".format(ix),
|
||||
Description=f"policy{ix}",
|
||||
)
|
||||
policies.append(
|
||||
{
|
||||
@ -4479,7 +4443,7 @@ def test_list_roles_with_more_than_100_roles_no_max_items_defaults_to_100():
|
||||
iam = boto3.client("iam", region_name="us-east-1")
|
||||
for i in range(150):
|
||||
iam.create_role(
|
||||
RoleName="test_role_{}".format(i), AssumeRolePolicyDocument="some policy"
|
||||
RoleName=f"test_role_{i}", AssumeRolePolicyDocument="some policy"
|
||||
)
|
||||
response = iam.list_roles()
|
||||
roles = response["Roles"]
|
||||
@ -4493,7 +4457,7 @@ def test_list_roles_max_item_and_marker_values_adhered():
|
||||
iam = boto3.client("iam", region_name="us-east-1")
|
||||
for i in range(10):
|
||||
iam.create_role(
|
||||
RoleName="test_role_{}".format(i), AssumeRolePolicyDocument="some policy"
|
||||
RoleName=f"test_role_{i}", AssumeRolePolicyDocument="some policy"
|
||||
)
|
||||
response = iam.list_roles(MaxItems=2)
|
||||
roles = response["Roles"]
|
||||
@ -4633,7 +4597,7 @@ def test_tag_user_error_unknown_user_name():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(404)
|
||||
ex.response["Error"]["Code"].should.contain("NoSuchEntity")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"The user with name {} cannot be found.".format(name)
|
||||
f"The user with name {name} cannot be found."
|
||||
)
|
||||
|
||||
|
||||
@ -4671,7 +4635,7 @@ def test_untag_user_error_unknown_user_name():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(404)
|
||||
ex.response["Error"]["Code"].should.contain("NoSuchEntity")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"The user with name {} cannot be found.".format(name)
|
||||
f"The user with name {name} cannot be found."
|
||||
)
|
||||
|
||||
|
||||
|
@ -368,7 +368,7 @@ Resources:
|
||||
|
||||
policy_arn = provisioned_resource["PhysicalResourceId"]
|
||||
policy_arn.should.match(
|
||||
"arn:aws:iam::{}:policy/MyStack-ThePolicy-[A-Z0-9]+".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:policy/MyStack-ThePolicy-[A-Z0-9]+"
|
||||
)
|
||||
expected_name = policy_arn.split("/")[1]
|
||||
|
||||
@ -420,7 +420,7 @@ Resources:
|
||||
logical_resource_id.should.equal("ThePolicy")
|
||||
|
||||
policy_arn = provisioned_resource["PhysicalResourceId"]
|
||||
policy_arn.should.equal("arn:aws:iam::{}:policy/{}".format(ACCOUNT_ID, name))
|
||||
policy_arn.should.equal(f"arn:aws:iam::{ACCOUNT_ID}:policy/{name}")
|
||||
|
||||
policy = iam_client.get_policy(PolicyArn=policy_arn)["Policy"]
|
||||
policy.should.have.key("Arn").equal(policy_arn)
|
||||
@ -469,7 +469,7 @@ Resources:
|
||||
|
||||
policy_arn = provisioned_resource["PhysicalResourceId"]
|
||||
policy_arn.should.match(
|
||||
"rn:aws:iam::{}:policy/MyStack-ThePolicy-[A-Z0-9]+".format(ACCOUNT_ID)
|
||||
f"rn:aws:iam::{ACCOUNT_ID}:policy/MyStack-ThePolicy-[A-Z0-9]+"
|
||||
)
|
||||
|
||||
response = iam_client.list_entities_for_policy(PolicyArn=policy_arn)
|
||||
@ -520,7 +520,7 @@ Resources:
|
||||
|
||||
policy_arn = provisioned_resource["PhysicalResourceId"]
|
||||
policy_arn.should.match(
|
||||
"rn:aws:iam::{}:policy/MyStack-ThePolicy-[A-Z0-9]+".format(ACCOUNT_ID)
|
||||
f"rn:aws:iam::{ACCOUNT_ID}:policy/MyStack-ThePolicy-[A-Z0-9]+"
|
||||
)
|
||||
|
||||
response = iam_client.list_entities_for_policy(PolicyArn=policy_arn)
|
||||
@ -571,7 +571,7 @@ Resources:
|
||||
|
||||
policy_arn = provisioned_resource["PhysicalResourceId"]
|
||||
policy_arn.should.match(
|
||||
"rn:aws:iam::{}:policy/MyStack-ThePolicy-[A-Z0-9]+".format(ACCOUNT_ID)
|
||||
f"rn:aws:iam::{ACCOUNT_ID}:policy/MyStack-ThePolicy-[A-Z0-9]+"
|
||||
)
|
||||
|
||||
response = iam_client.list_entities_for_policy(PolicyArn=policy_arn)
|
||||
@ -594,7 +594,7 @@ def test_iam_cloudformation_create_user_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -646,7 +646,7 @@ def test_iam_cloudformation_update_user_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -733,7 +733,7 @@ def test_iam_cloudformation_delete_user_policy_having_generated_name():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -788,7 +788,7 @@ def test_iam_cloudformation_create_role_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -840,7 +840,7 @@ def test_iam_cloudformation_update_role_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -927,7 +927,7 @@ def test_iam_cloudformation_delete_role_policy_having_generated_name():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -982,7 +982,7 @@ def test_iam_cloudformation_create_group_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -1034,7 +1034,7 @@ def test_iam_cloudformation_update_group_policy():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
@ -1121,7 +1121,7 @@ def test_iam_cloudformation_delete_group_policy_having_generated_name():
|
||||
s3_client = boto3.client("s3", region_name="us-east-1")
|
||||
bucket_name = "my-bucket"
|
||||
s3_client.create_bucket(Bucket=bucket_name)
|
||||
bucket_arn = "arn:aws:s3:::{0}".format(bucket_name)
|
||||
bucket_arn = f"arn:aws:s3:::{bucket_name}"
|
||||
|
||||
cf_client = boto3.client("cloudformation", region_name="us-east-1")
|
||||
stack_name = "MyStack"
|
||||
|
@ -40,7 +40,7 @@ def test_get_group_boto3():
|
||||
created["Path"].should.equal("/")
|
||||
created["GroupName"].should.equal("my-group")
|
||||
created.should.have.key("GroupId")
|
||||
created["Arn"].should.equal("arn:aws:iam::{}:group/my-group".format(ACCOUNT_ID))
|
||||
created["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:group/my-group")
|
||||
created["CreateDate"].should.be.a(datetime)
|
||||
|
||||
retrieved = conn.get_group(GroupName="my-group")["Group"]
|
||||
@ -63,15 +63,16 @@ def test_get_group_current():
|
||||
assert result["Group"]["GroupName"] == "my-group"
|
||||
assert isinstance(result["Group"]["CreateDate"], datetime)
|
||||
assert result["Group"]["GroupId"]
|
||||
assert result["Group"]["Arn"] == "arn:aws:iam::{}:group/my-group".format(ACCOUNT_ID)
|
||||
assert result["Group"]["Arn"] == f"arn:aws:iam::{ACCOUNT_ID}:group/my-group"
|
||||
assert not result["Users"]
|
||||
|
||||
# Make a group with a different path:
|
||||
other_group = conn.create_group(GroupName="my-other-group", Path="some/location")
|
||||
assert other_group["Group"]["Path"] == "some/location"
|
||||
assert other_group["Group"][
|
||||
"Arn"
|
||||
] == "arn:aws:iam::{}:group/some/location/my-other-group".format(ACCOUNT_ID)
|
||||
assert (
|
||||
other_group["Group"]["Arn"]
|
||||
== f"arn:aws:iam::{ACCOUNT_ID}:group/some/location/my-other-group"
|
||||
)
|
||||
|
||||
|
||||
@mock_iam
|
||||
|
@ -18,7 +18,7 @@ def test_create_open_id_connect_provider():
|
||||
)
|
||||
|
||||
response["OpenIDConnectProviderArn"].should.equal(
|
||||
"arn:aws:iam::{}:oidc-provider/example.com".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:oidc-provider/example.com"
|
||||
)
|
||||
|
||||
response = client.create_open_id_connect_provider(
|
||||
@ -26,7 +26,7 @@ def test_create_open_id_connect_provider():
|
||||
)
|
||||
|
||||
response["OpenIDConnectProviderArn"].should.equal(
|
||||
"arn:aws:iam::{}:oidc-provider/example.org".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:oidc-provider/example.org"
|
||||
)
|
||||
|
||||
response = client.create_open_id_connect_provider(
|
||||
@ -34,7 +34,7 @@ def test_create_open_id_connect_provider():
|
||||
)
|
||||
|
||||
response["OpenIDConnectProviderArn"].should.equal(
|
||||
"arn:aws:iam::{}:oidc-provider/example.org/oidc".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:oidc-provider/example.org/oidc"
|
||||
)
|
||||
|
||||
response = client.create_open_id_connect_provider(
|
||||
@ -42,7 +42,7 @@ def test_create_open_id_connect_provider():
|
||||
)
|
||||
|
||||
response["OpenIDConnectProviderArn"].should.equal(
|
||||
"arn:aws:iam::{}:oidc-provider/example.org/oidc-query".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:oidc-provider/example.org/oidc-query"
|
||||
)
|
||||
|
||||
|
||||
@ -106,7 +106,7 @@ def test_create_open_id_connect_provider_too_many_entries():
|
||||
def test_create_open_id_connect_provider_quota_error():
|
||||
client = boto3.client("iam", region_name="us-east-1")
|
||||
|
||||
too_many_client_ids = ["{}".format(i) for i in range(101)]
|
||||
too_many_client_ids = [f"{i}" for i in range(101)]
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_open_id_connect_provider(
|
||||
Url="http://example.org",
|
||||
@ -155,7 +155,7 @@ def test_delete_open_id_connect_provider():
|
||||
client.get_open_id_connect_provider.when.called_with(
|
||||
OpenIDConnectProviderArn=open_id_arn
|
||||
).should.throw(
|
||||
ClientError, "OpenIDConnect Provider not found for arn {}".format(open_id_arn)
|
||||
ClientError, f"OpenIDConnect Provider not found for arn {open_id_arn}"
|
||||
)
|
||||
|
||||
# deleting a non existing provider should be successful
|
||||
@ -206,13 +206,11 @@ def test_get_open_id_connect_provider_errors():
|
||||
)
|
||||
open_id_arn = response["OpenIDConnectProviderArn"]
|
||||
|
||||
unknown_arn = open_id_arn + "-not-existing"
|
||||
client.get_open_id_connect_provider.when.called_with(
|
||||
OpenIDConnectProviderArn=open_id_arn + "-not-existing"
|
||||
OpenIDConnectProviderArn=unknown_arn
|
||||
).should.throw(
|
||||
ClientError,
|
||||
"OpenIDConnect Provider not found for arn {}".format(
|
||||
open_id_arn + "-not-existing"
|
||||
),
|
||||
ClientError, f"OpenIDConnect Provider not found for arn {unknown_arn}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -22,9 +22,7 @@ def test_get_all_server_certs():
|
||||
certs.should.have.length_of(1)
|
||||
cert1 = certs[0]
|
||||
cert1["ServerCertificateName"].should.equal("certname")
|
||||
cert1["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:server-certificate/certname".format(ACCOUNT_ID)
|
||||
)
|
||||
cert1["Arn"].should.equal(f"arn:aws:iam::{ACCOUNT_ID}:server-certificate/certname")
|
||||
|
||||
|
||||
@mock_iam
|
||||
@ -59,7 +57,7 @@ def test_get_server_cert():
|
||||
metadata["Path"].should.equal("/")
|
||||
metadata["ServerCertificateName"].should.equal("certname")
|
||||
metadata["Arn"].should.equal(
|
||||
"arn:aws:iam::{}:server-certificate/certname".format(ACCOUNT_ID)
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:server-certificate/certname"
|
||||
)
|
||||
metadata.should.have.key("ServerCertificateId")
|
||||
metadata["UploadDate"].should.be.a(datetime)
|
||||
|
@ -16,25 +16,25 @@ def test_endpoints():
|
||||
endpoint = client.describe_endpoint(endpointType="iot:Data")
|
||||
endpoint.should.have.key("endpointAddress").which.should_not.contain("ats")
|
||||
endpoint.should.have.key("endpointAddress").which.should.contain(
|
||||
"iot.{}.amazonaws.com".format(region_name)
|
||||
f"iot.{region_name}.amazonaws.com"
|
||||
)
|
||||
|
||||
# iot:Data-ATS
|
||||
endpoint = client.describe_endpoint(endpointType="iot:Data-ATS")
|
||||
endpoint.should.have.key("endpointAddress").which.should.contain(
|
||||
"ats.iot.{}.amazonaws.com".format(region_name)
|
||||
f"ats.iot.{region_name}.amazonaws.com"
|
||||
)
|
||||
|
||||
# iot:Data-ATS
|
||||
endpoint = client.describe_endpoint(endpointType="iot:CredentialProvider")
|
||||
endpoint.should.have.key("endpointAddress").which.should.contain(
|
||||
"credentials.iot.{}.amazonaws.com".format(region_name)
|
||||
f"credentials.iot.{region_name}.amazonaws.com"
|
||||
)
|
||||
|
||||
# iot:Data-ATS
|
||||
endpoint = client.describe_endpoint(endpointType="iot:Jobs")
|
||||
endpoint.should.have.key("endpointAddress").which.should.contain(
|
||||
"jobs.iot.{}.amazonaws.com".format(region_name)
|
||||
f"jobs.iot.{region_name}.amazonaws.com"
|
||||
)
|
||||
|
||||
# raise InvalidRequestException
|
||||
|
@ -206,7 +206,7 @@ def test_delete_certificate_validation():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.delete_certificate(certificateId=cert_id)
|
||||
e.value.response["Error"]["Message"].should.contain(
|
||||
"Things must be detached before deletion (arn: %s)" % cert_arn
|
||||
f"Things must be detached before deletion (arn: {cert_arn})"
|
||||
)
|
||||
res = client.list_certificates()
|
||||
res.should.have.key("certificates").which.should.have.length_of(1)
|
||||
@ -215,7 +215,7 @@ def test_delete_certificate_validation():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.delete_certificate(certificateId=cert_id)
|
||||
e.value.response["Error"]["Message"].should.contain(
|
||||
"Certificate policies must be detached before deletion (arn: %s)" % cert_arn
|
||||
f"Certificate policies must be detached before deletion (arn: {cert_arn})"
|
||||
)
|
||||
res = client.list_certificates()
|
||||
res.should.have.key("certificates").which.should.have.length_of(1)
|
||||
|
@ -208,7 +208,7 @@ def test_policy_versions(iot_client):
|
||||
)
|
||||
err = exc.value.response["Error"]
|
||||
err["Message"].should.equal(
|
||||
"The policy %s already has the maximum number of versions (5)" % policy_name
|
||||
f"The policy {policy_name} already has the maximum number of versions (5)"
|
||||
)
|
||||
|
||||
iot_client.delete_policy_version(policyName=policy_name, policyVersionId="1")
|
||||
|
@ -44,7 +44,7 @@ def test_list_attached_policies(url_encode_arn):
|
||||
if url_encode_arn:
|
||||
certificate_arn = quote(certificate_arn, safe="")
|
||||
|
||||
result = test_client.post("/attached-policies/{}".format(certificate_arn))
|
||||
result = test_client.post(f"/attached-policies/{certificate_arn}")
|
||||
result.status_code.should.equal(200)
|
||||
result_dict = json.loads(result.data.decode("utf-8"))
|
||||
result_dict["policies"][0]["policyName"].should.equal("my-policy")
|
||||
|
@ -18,7 +18,7 @@ def test_iotdata_list():
|
||||
|
||||
# just making sure that server is up
|
||||
thing_name = "nothing"
|
||||
res = test_client.get("/things/{}/shadow".format(thing_name))
|
||||
res = test_client.get(f"/things/{thing_name}/shadow")
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
|
||||
@ -37,5 +37,5 @@ def test_publish(url_encode_topic):
|
||||
topic = "test/topic"
|
||||
topic_for_path = quote(topic, safe="") if url_encode_topic else topic
|
||||
|
||||
result = test_client.post("/topics/{}".format(topic_for_path))
|
||||
result = test_client.post(f"/topics/{topic_for_path}")
|
||||
result.status_code.should.equal(200)
|
||||
|
@ -97,7 +97,7 @@ def test_list_many_streams():
|
||||
conn = boto3.client("kinesis", region_name="us-west-2")
|
||||
|
||||
for i in range(11):
|
||||
conn.create_stream(StreamName="stream%d" % i, ShardCount=1)
|
||||
conn.create_stream(StreamName=f"stream{i}", ShardCount=1)
|
||||
|
||||
resp = conn.list_streams()
|
||||
stream_names = resp["StreamNames"]
|
||||
@ -124,7 +124,7 @@ def test_describe_stream_summary():
|
||||
stream["StreamName"].should.equal(stream_name)
|
||||
stream["OpenShardCount"].should.equal(shard_count)
|
||||
stream["StreamARN"].should.equal(
|
||||
"arn:aws:kinesis:us-west-2:{}:stream/{}".format(ACCOUNT_ID, stream_name)
|
||||
f"arn:aws:kinesis:us-west-2:{ACCOUNT_ID}:stream/{stream_name}"
|
||||
)
|
||||
stream["StreamStatus"].should.equal("ACTIVE")
|
||||
|
||||
|
@ -23,9 +23,7 @@ def test_get_hls_streaming_session_url():
|
||||
endpoint_url=data_endpoint,
|
||||
)
|
||||
res = client.get_hls_streaming_session_url(StreamName=stream_name)
|
||||
reg_exp = r"^{}/hls/v1/getHLSMasterPlaylist.m3u8\?SessionToken\=.+$".format(
|
||||
data_endpoint
|
||||
)
|
||||
reg_exp = rf"^{data_endpoint}/hls/v1/getHLSMasterPlaylist.m3u8\?SessionToken\=.+$"
|
||||
res.should.have.key("HLSStreamingSessionURL").which.should.match(reg_exp)
|
||||
|
||||
|
||||
@ -47,9 +45,7 @@ def test_get_dash_streaming_session_url():
|
||||
endpoint_url=data_endpoint,
|
||||
)
|
||||
res = client.get_dash_streaming_session_url(StreamName=stream_name)
|
||||
reg_exp = r"^{}/dash/v1/getDASHManifest.mpd\?SessionToken\=.+$".format(
|
||||
data_endpoint
|
||||
)
|
||||
reg_exp = rf"^{data_endpoint}/dash/v1/getDASHManifest.mpd\?SessionToken\=.+$"
|
||||
res.should.have.key("DASHStreamingSessionURL").which.should.match(reg_exp)
|
||||
|
||||
|
||||
|
@ -65,9 +65,7 @@ def test_create_key():
|
||||
)
|
||||
|
||||
key["KeyMetadata"]["Arn"].should.equal(
|
||||
"arn:aws:kms:us-east-1:{}:key/{}".format(
|
||||
ACCOUNT_ID, key["KeyMetadata"]["KeyId"]
|
||||
)
|
||||
f"arn:aws:kms:us-east-1:{ACCOUNT_ID}:key/{key['KeyMetadata']['KeyId']}"
|
||||
)
|
||||
key["KeyMetadata"]["AWSAccountId"].should.equal(ACCOUNT_ID)
|
||||
key["KeyMetadata"]["CreationDate"].should.be.a(datetime)
|
||||
@ -262,7 +260,7 @@ def test__create_alias__can_create_multiple_aliases_for_same_key_id():
|
||||
aliases = client.list_aliases(KeyId=key_id)["Aliases"]
|
||||
|
||||
for name in alias_names:
|
||||
alias_arn = "arn:aws:kms:us-east-1:{}:{}".format(ACCOUNT_ID, name)
|
||||
alias_arn = f"arn:aws:kms:us-east-1:{ACCOUNT_ID}:{name}"
|
||||
aliases.should.contain(
|
||||
{"AliasName": name, "AliasArn": alias_arn, "TargetKeyId": key_id}
|
||||
)
|
||||
@ -278,8 +276,8 @@ def test_list_aliases():
|
||||
aliases.should.have.length_of(14)
|
||||
default_alias_names = ["aws/ebs", "aws/s3", "aws/redshift", "aws/rds"]
|
||||
for name in default_alias_names:
|
||||
full_name = "alias/{}".format(name)
|
||||
arn = "arn:aws:kms:{}:{}:{}".format(region, ACCOUNT_ID, full_name)
|
||||
full_name = f"alias/{name}"
|
||||
arn = f"arn:aws:kms:{region}:{ACCOUNT_ID}:{full_name}"
|
||||
aliases.should.contain({"AliasName": full_name, "AliasArn": arn})
|
||||
|
||||
|
||||
@ -991,9 +989,7 @@ def test__create_alias__raises_if_alias_has_restricted_characters(name):
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ValidationException")
|
||||
err["Message"].should.equal(
|
||||
"1 validation error detected: Value '{}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(
|
||||
name
|
||||
)
|
||||
f"1 validation error detected: Value '{name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$"
|
||||
)
|
||||
|
||||
|
||||
|
@ -158,9 +158,9 @@ def test_put_subscription_filter_with_lambda():
|
||||
msg_showed_up, received_message = _wait_for_log_msg(
|
||||
client_logs, "/aws/lambda/test", "awslogs"
|
||||
)
|
||||
assert msg_showed_up, "CloudWatch log event was not found. All logs: {}".format(
|
||||
received_message
|
||||
)
|
||||
assert (
|
||||
msg_showed_up
|
||||
), f"CloudWatch log event was not found. All logs: {received_message}"
|
||||
|
||||
data = json.loads(received_message)["awslogs"]["data"]
|
||||
response = json.loads(
|
||||
@ -229,9 +229,9 @@ def test_subscription_filter_applies_to_new_streams():
|
||||
msg_showed_up, received_message = _wait_for_log_msg(
|
||||
client_logs, "/aws/lambda/test", "awslogs"
|
||||
)
|
||||
assert msg_showed_up, "CloudWatch log event was not found. All logs: {}".format(
|
||||
received_message
|
||||
)
|
||||
assert (
|
||||
msg_showed_up
|
||||
), f"CloudWatch log event was not found. All logs: {received_message}"
|
||||
|
||||
data = json.loads(received_message)["awslogs"]["data"]
|
||||
response = json.loads(
|
||||
|
@ -1087,7 +1087,7 @@ def test_describe_log_streams_paging():
|
||||
resp["logStreams"].should.have.length_of(2)
|
||||
resp["logStreams"][0]["arn"].should.contain(log_group_name)
|
||||
resp["nextToken"].should.equal(
|
||||
"{}@{}".format(log_group_name, resp["logStreams"][1]["logStreamName"])
|
||||
f"{log_group_name}@{resp['logStreams'][1]['logStreamName']}"
|
||||
)
|
||||
|
||||
resp = client.describe_log_streams(
|
||||
@ -1096,7 +1096,7 @@ def test_describe_log_streams_paging():
|
||||
resp["logStreams"].should.have.length_of(1)
|
||||
resp["logStreams"][0]["arn"].should.contain(log_group_name)
|
||||
resp["nextToken"].should.equal(
|
||||
"{}@{}".format(log_group_name, resp["logStreams"][0]["logStreamName"])
|
||||
f"{log_group_name}@{resp['logStreams'][0]['logStreamName']}"
|
||||
)
|
||||
|
||||
resp = client.describe_log_streams(
|
||||
@ -1170,7 +1170,7 @@ def test_get_too_many_log_events(nr_of_events):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'limit' failed to satisfy constraint".format(nr_of_events)
|
||||
f"Value '{nr_of_events}' at 'limit' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain("Member must have value less than or equal to 10000")
|
||||
|
||||
@ -1194,7 +1194,7 @@ def test_filter_too_many_log_events(nr_of_events):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'limit' failed to satisfy constraint".format(nr_of_events)
|
||||
f"Value '{nr_of_events}' at 'limit' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain("Member must have value less than or equal to 10000")
|
||||
|
||||
@ -1209,7 +1209,7 @@ def test_describe_too_many_log_groups(nr_of_groups):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'limit' failed to satisfy constraint".format(nr_of_groups)
|
||||
f"Value '{nr_of_groups}' at 'limit' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain("Member must have value less than or equal to 50")
|
||||
|
||||
@ -1226,7 +1226,7 @@ def test_describe_too_many_log_streams(nr_of_streams):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'limit' failed to satisfy constraint".format(nr_of_streams)
|
||||
f"Value '{nr_of_streams}' at 'limit' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain("Member must have value less than or equal to 50")
|
||||
|
||||
@ -1242,9 +1242,7 @@ def test_create_log_group_invalid_name_length(length):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'logGroupName' failed to satisfy constraint".format(
|
||||
log_group_name
|
||||
)
|
||||
f"Value '{log_group_name}' at 'logGroupName' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain("Member must have length less than or equal to 512")
|
||||
|
||||
@ -1263,7 +1261,7 @@ def test_describe_log_streams_invalid_order_by(invalid_orderby):
|
||||
err["Code"].should.equal("InvalidParameterException")
|
||||
err["Message"].should.contain("1 validation error detected")
|
||||
err["Message"].should.contain(
|
||||
"Value '{}' at 'orderBy' failed to satisfy constraint".format(invalid_orderby)
|
||||
f"Value '{invalid_orderby}' at 'orderBy' failed to satisfy constraint"
|
||||
)
|
||||
err["Message"].should.contain(
|
||||
"Member must satisfy enum value set: [LogStreamName, LastEventTime]"
|
||||
|
@ -82,9 +82,7 @@ class TestLogFilterParameters(TestLogFilter):
|
||||
timestamp = int(unix_time_millis(datetime.utcnow()))
|
||||
messages = []
|
||||
for i in range(25):
|
||||
messages.append(
|
||||
{"message": "Message number {}".format(i), "timestamp": timestamp}
|
||||
)
|
||||
messages.append({"message": f"Message number {i}", "timestamp": timestamp})
|
||||
timestamp += 100
|
||||
|
||||
self.conn.put_log_events(
|
||||
|
@ -171,7 +171,7 @@ def test_create_another_member_withopts():
|
||||
)
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("InvalidRequestException")
|
||||
err["Message"].should.contain("Invitation {0} not valid".format(invitation_id))
|
||||
err["Message"].should.contain(f"Invitation {invitation_id} not valid")
|
||||
|
||||
# Delete member 2
|
||||
conn.delete_member(NetworkId=network_id, MemberId=member_id2)
|
||||
@ -186,7 +186,7 @@ def test_create_another_member_withopts():
|
||||
conn.get_member(NetworkId=network_id, MemberId=member_id2)
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceNotFoundException")
|
||||
err["Message"].should.contain("Member {0} not found".format(member_id2))
|
||||
err["Message"].should.contain(f"Member {member_id2} not found")
|
||||
|
||||
# Delete member 1
|
||||
conn.delete_member(NetworkId=network_id, MemberId=member_id)
|
||||
@ -422,9 +422,7 @@ def test_create_another_member_alreadyhave():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("InvalidRequestException")
|
||||
err["Message"].should.contain(
|
||||
"Member name {0} already exists in network {1}".format(
|
||||
"testmember1", network_id
|
||||
)
|
||||
f"Member name testmember1 already exists in network {network_id}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -78,7 +78,7 @@ def test_create_node():
|
||||
conn.get_node(NetworkId=network_id, MemberId=member_id, NodeId=node_id)
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceNotFoundException")
|
||||
err["Message"].should.contain("Node {0} not found".format(node_id))
|
||||
err["Message"].should.contain(f"Node {node_id} not found")
|
||||
|
||||
|
||||
@mock_managedblockchain
|
||||
@ -147,7 +147,7 @@ def test_create_node_standard_edition():
|
||||
conn.list_nodes(NetworkId=network_id, MemberId=member_id)
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceNotFoundException")
|
||||
err["Message"].should.contain("Member {0} not found".format(member_id))
|
||||
err["Message"].should.contain(f"Member {member_id} not found")
|
||||
|
||||
|
||||
@mock_managedblockchain
|
||||
@ -196,7 +196,7 @@ def test_create_too_many_nodes():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceLimitExceededException")
|
||||
err["Message"].should.contain(
|
||||
"Maximum number of nodes exceeded in member {0}".format(member_id)
|
||||
f"Maximum number of nodes exceeded in member {member_id}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -313,7 +313,7 @@ def test_vote_on_proposal_expiredproposal():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("InvalidRequestException")
|
||||
err["Message"].should.contain(
|
||||
"Proposal {0} is expired and you cannot vote on it.".format(proposal_id)
|
||||
f"Proposal {proposal_id} is expired and you cannot vote on it."
|
||||
)
|
||||
|
||||
# Get proposal details - should be EXPIRED
|
||||
@ -633,7 +633,7 @@ def test_vote_on_proposal_alreadyvoted():
|
||||
err = ex.value.response["Error"]
|
||||
err["Code"].should.equal("ResourceAlreadyExistsException")
|
||||
err["Message"].should.contain(
|
||||
"Member {0} has already voted on proposal {1}.".format(member_id, proposal_id)
|
||||
f"Member {member_id} has already voted on proposal {proposal_id}."
|
||||
)
|
||||
|
||||
|
||||
|
@ -11,7 +11,7 @@ region = "eu-west-1"
|
||||
def _create_input_config(name, **kwargs):
|
||||
role_arn = kwargs.get(
|
||||
"role_arn",
|
||||
"arn:aws:iam::{}:role/TestMediaLiveInputCreateRole".format(ACCOUNT_ID),
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/TestMediaLiveInputCreateRole",
|
||||
)
|
||||
input_type = kwargs.get("type", "RTP_PUSH")
|
||||
request_id = kwargs.get("request_id", uuid4().hex)
|
||||
@ -52,7 +52,7 @@ def _create_input_config(name, **kwargs):
|
||||
def _create_channel_config(name, **kwargs):
|
||||
role_arn = kwargs.get(
|
||||
"role_arn",
|
||||
"arn:aws:iam::{}:role/TestMediaLiveChannelCreateRole".format(ACCOUNT_ID),
|
||||
f"arn:aws:iam::{ACCOUNT_ID}:role/TestMediaLiveChannelCreateRole",
|
||||
)
|
||||
input_id = kwargs.get("input_id", "an-attachment-id")
|
||||
input_settings = kwargs.get(
|
||||
@ -114,7 +114,7 @@ def test_create_channel_succeeds():
|
||||
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
response["Channel"]["Arn"].should.equal(
|
||||
"arn:aws:medialive:channel:{}".format(response["Channel"]["Id"])
|
||||
f"arn:aws:medialive:channel:{response['Channel']['Id']}"
|
||||
)
|
||||
response["Channel"]["Destinations"].should.equal(channel_config["Destinations"])
|
||||
response["Channel"]["EncoderSettings"].should.equal(
|
||||
@ -175,7 +175,7 @@ def test_describe_channel_succeeds():
|
||||
)
|
||||
|
||||
describe_response["Arn"].should.equal(
|
||||
"arn:aws:medialive:channel:{}".format(describe_response["Id"])
|
||||
f"arn:aws:medialive:channel:{describe_response['Id']}"
|
||||
)
|
||||
describe_response["Destinations"].should.equal(channel_config["Destinations"])
|
||||
describe_response["EncoderSettings"].should.equal(channel_config["EncoderSettings"])
|
||||
@ -256,7 +256,7 @@ def test_create_input_succeeds():
|
||||
r_input = create_response["Input"]
|
||||
input_id = r_input["Id"]
|
||||
assert len(input_id) > 1
|
||||
r_input["Arn"].should.equal("arn:aws:medialive:input:{}".format(r_input["Id"]))
|
||||
r_input["Arn"].should.equal(f"arn:aws:medialive:input:{r_input['Id']}")
|
||||
r_input["Name"].should.equal(input_name)
|
||||
r_input["AttachedChannels"].should.equal([])
|
||||
r_input["Destinations"].should.equal(input_config["Destinations"])
|
||||
|
@ -60,9 +60,7 @@ def test_create_channel_succeeds():
|
||||
|
||||
response = client.create_channel(**channel_config)
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:channel:{}".format(response["Id"])
|
||||
)
|
||||
response["Arn"].should.equal(f"arn:aws:mediapackage:channel:{response['Id']}")
|
||||
response["Description"].should.equal("Awesome channel!")
|
||||
response["Id"].should.equal("channel-id")
|
||||
response["Tags"]["Customer"].should.equal("moto")
|
||||
@ -76,7 +74,7 @@ def test_describe_channel_succeeds():
|
||||
create_response = client.create_channel(**channel_config)
|
||||
describe_response = client.describe_channel(Id=create_response["Id"])
|
||||
describe_response["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:channel:{}".format(describe_response["Id"])
|
||||
f"arn:aws:mediapackage:channel:{describe_response['Id']}"
|
||||
)
|
||||
describe_response["Description"].should.equal(channel_config["Description"])
|
||||
describe_response["Tags"]["Customer"].should.equal("moto")
|
||||
@ -90,7 +88,7 @@ def test_describe_unknown_channel_throws_error():
|
||||
client.describe_channel(Id=channel_id)
|
||||
err = err.value.response["Error"]
|
||||
err["Code"].should.equal("NotFoundException")
|
||||
err["Message"].should.equal("channel with id={} not found".format(str(channel_id)))
|
||||
err["Message"].should.equal(f"channel with id={channel_id} not found")
|
||||
|
||||
|
||||
@mock_mediapackage
|
||||
@ -101,7 +99,7 @@ def test_delete_unknown_channel_throws_error():
|
||||
client.delete_channel(Id=channel_id)
|
||||
err = err.value.response["Error"]
|
||||
err["Code"].should.equal("NotFoundException")
|
||||
err["Message"].should.equal("channel with id={} not found".format(str(channel_id)))
|
||||
err["Message"].should.equal(f"channel with id={channel_id} not found")
|
||||
|
||||
|
||||
@mock_mediapackage
|
||||
@ -131,7 +129,7 @@ def test_list_channels_succeds():
|
||||
len(channels_list).should.equal(1)
|
||||
first_channel = channels_list[0]
|
||||
first_channel["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:channel:{}".format(first_channel["Id"])
|
||||
f"arn:aws:mediapackage:channel:{first_channel['Id']}"
|
||||
)
|
||||
first_channel["Description"].should.equal(channel_config["Description"])
|
||||
first_channel["Tags"]["Customer"].should.equal("moto")
|
||||
@ -145,7 +143,7 @@ def test_create_origin_endpoint_succeeds():
|
||||
response = client.create_origin_endpoint(**origin_endpoint_config)
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
response["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:origin_endpoint:{}".format(response["Id"])
|
||||
f"arn:aws:mediapackage:origin_endpoint:{response['Id']}"
|
||||
)
|
||||
response["ChannelId"].should.equal(origin_endpoint_config["ChannelId"])
|
||||
response["Description"].should.equal(origin_endpoint_config["Description"])
|
||||
@ -162,16 +160,14 @@ def test_describe_origin_endpoint_succeeds():
|
||||
describe_response = client.describe_origin_endpoint(Id=create_response["Id"])
|
||||
describe_response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
describe_response["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:origin_endpoint:{}".format(describe_response["Id"])
|
||||
f"arn:aws:mediapackage:origin_endpoint:{describe_response['Id']}"
|
||||
)
|
||||
describe_response["ChannelId"].should.equal(origin_endpoint_config["ChannelId"])
|
||||
describe_response["Description"].should.equal(origin_endpoint_config["Description"])
|
||||
describe_response["HlsPackage"].should.equal(origin_endpoint_config["HlsPackage"])
|
||||
describe_response["Origination"].should.equal("ALLOW")
|
||||
describe_response["Url"].should.equal(
|
||||
"https://origin-endpoint.mediapackage.{}.amazonaws.com/{}".format(
|
||||
region, describe_response["Id"]
|
||||
)
|
||||
f"https://origin-endpoint.mediapackage.{region}.amazonaws.com/{describe_response['Id']}"
|
||||
)
|
||||
|
||||
|
||||
@ -183,9 +179,7 @@ def test_describe_unknown_origin_endpoint_throws_error():
|
||||
client.describe_origin_endpoint(Id=channel_id)
|
||||
err = err.value.response["Error"]
|
||||
err["Code"].should.equal("NotFoundException")
|
||||
err["Message"].should.equal(
|
||||
"origin endpoint with id={} not found".format(str(channel_id))
|
||||
)
|
||||
err["Message"].should.equal(f"origin endpoint with id={channel_id} not found")
|
||||
|
||||
|
||||
@mock_mediapackage
|
||||
@ -213,9 +207,7 @@ def test_delete_unknown_origin_endpoint_throws_error():
|
||||
client.delete_origin_endpoint(Id=channel_id)
|
||||
err = err.value.response["Error"]
|
||||
err["Code"].should.equal("NotFoundException")
|
||||
err["Message"].should.equal(
|
||||
"origin endpoint with id={} not found".format(str(channel_id))
|
||||
)
|
||||
err["Message"].should.equal(f"origin endpoint with id={channel_id} not found")
|
||||
|
||||
|
||||
@mock_mediapackage
|
||||
@ -244,9 +236,7 @@ def test_update_unknown_origin_endpoint_throws_error():
|
||||
)
|
||||
err = err.value.response["Error"]
|
||||
err["Code"].should.equal("NotFoundException")
|
||||
err["Message"].should.equal(
|
||||
"origin endpoint with id={} not found".format(str(channel_id))
|
||||
)
|
||||
err["Message"].should.equal(f"origin endpoint with id={channel_id} not found")
|
||||
|
||||
|
||||
@mock_mediapackage
|
||||
@ -261,7 +251,7 @@ def test_list_origin_endpoint_succeeds():
|
||||
len(origin_endpoints_list).should.equal(1)
|
||||
first_origin_endpoint = origin_endpoints_list[0]
|
||||
first_origin_endpoint["Arn"].should.equal(
|
||||
"arn:aws:mediapackage:origin_endpoint:{}".format(first_origin_endpoint["Id"])
|
||||
f"arn:aws:mediapackage:origin_endpoint:{first_origin_endpoint['Id']}"
|
||||
)
|
||||
first_origin_endpoint["ChannelId"].should.equal(origin_endpoint_config["ChannelId"])
|
||||
first_origin_endpoint["Description"].should.equal(
|
||||
|
@ -16,9 +16,7 @@ def test_create_container_succeeds():
|
||||
)
|
||||
container = response["Container"]
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
container["ARN"].should.equal(
|
||||
"arn:aws:mediastore:container:{}".format(container["Name"])
|
||||
)
|
||||
container["ARN"].should.equal(f"arn:aws:mediastore:container:{container['Name']}")
|
||||
container["Name"].should.equal("Awesome container!")
|
||||
container["Status"].should.equal("CREATING")
|
||||
|
||||
@ -33,9 +31,7 @@ def test_describe_container_succeeds():
|
||||
response = client.describe_container(ContainerName=container_name)
|
||||
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
|
||||
container = response["Container"]
|
||||
container["ARN"].should.equal(
|
||||
"arn:aws:mediastore:container:{}".format(container["Name"])
|
||||
)
|
||||
container["ARN"].should.equal(f"arn:aws:mediastore:container:{container_name}")
|
||||
container["Name"].should.equal("Awesome container!")
|
||||
container["Status"].should.equal("ACTIVE")
|
||||
|
||||
|
@ -22,7 +22,7 @@ def test_describe_stacks():
|
||||
client = boto3.client("opsworks", region_name="us-east-1")
|
||||
for i in range(1, 4):
|
||||
client.create_stack(
|
||||
Name="test_stack_{0}".format(i),
|
||||
Name=f"test_stack_{i}",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
|
@ -1614,9 +1614,7 @@ def test_register_delegated_administrator():
|
||||
admin = response["DelegatedAdministrators"][0]
|
||||
admin["Id"].should.equal(account_id)
|
||||
admin["Arn"].should.equal(
|
||||
"arn:aws:organizations::{0}:account/{1}/{2}".format(
|
||||
ACCOUNT_ID, org_id, account_id
|
||||
)
|
||||
f"arn:aws:organizations::{ACCOUNT_ID}:account/{org_id}/{account_id}"
|
||||
)
|
||||
admin["Email"].should.equal(mockemail)
|
||||
admin["Name"].should.equal(mockname)
|
||||
@ -1740,9 +1738,7 @@ def test_list_delegated_administrators():
|
||||
admin = response["DelegatedAdministrators"][0]
|
||||
admin["Id"].should.equal(account_id_1)
|
||||
admin["Arn"].should.equal(
|
||||
"arn:aws:organizations::{0}:account/{1}/{2}".format(
|
||||
ACCOUNT_ID, org_id, account_id_1
|
||||
)
|
||||
f"arn:aws:organizations::{ACCOUNT_ID}:account/{org_id}/{account_id_1}"
|
||||
)
|
||||
admin["Email"].should.equal(mockemail)
|
||||
admin["Name"].should.equal(mockname)
|
||||
|
@ -38,7 +38,7 @@ def test_create_resource_share():
|
||||
name="test",
|
||||
allowExternalPrincipals=False,
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(ACCOUNT_ID)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
|
||||
@ -80,7 +80,7 @@ def test_create_resource_share_errors():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_resource_share(
|
||||
name="test", resourceArns=["arn:aws:iam::{}:role/test".format(ACCOUNT_ID)]
|
||||
name="test", resourceArns=[f"arn:aws:iam::{ACCOUNT_ID}:role/test"]
|
||||
)
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("CreateResourceShare")
|
||||
@ -97,9 +97,7 @@ def test_create_resource_share_errors():
|
||||
name="test",
|
||||
principals=["invalid"],
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
ex = e.value
|
||||
@ -129,7 +127,7 @@ def test_create_resource_share_with_organization():
|
||||
name="test",
|
||||
principals=[org_arn],
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(ACCOUNT_ID)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
|
||||
@ -142,7 +140,7 @@ def test_create_resource_share_with_organization():
|
||||
name="test",
|
||||
principals=[ou_arn],
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(ACCOUNT_ID)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
|
||||
@ -165,13 +163,9 @@ def test_create_resource_share_with_organization_errors():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_resource_share(
|
||||
name="test",
|
||||
principals=[
|
||||
"arn:aws:organizations::{}:organization/o-unknown".format(ACCOUNT_ID)
|
||||
],
|
||||
principals=[f"arn:aws:organizations::{ACCOUNT_ID}:organization/o-unknown"],
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
ex = e.value
|
||||
@ -187,13 +181,9 @@ def test_create_resource_share_with_organization_errors():
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.create_resource_share(
|
||||
name="test",
|
||||
principals=[
|
||||
"arn:aws:organizations::{}:ou/o-unknown/ou-unknown".format(ACCOUNT_ID)
|
||||
],
|
||||
principals=[f"arn:aws:organizations::{ACCOUNT_ID}:ou/o-unknown/ou-unknown"],
|
||||
resourceArns=[
|
||||
"arn:aws:ec2:us-east-1:{}:transit-gateway/tgw-123456789".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"arn:aws:ec2:us-east-1:{ACCOUNT_ID}:transit-gateway/tgw-123456789"
|
||||
],
|
||||
)
|
||||
ex = e.value
|
||||
@ -284,9 +274,7 @@ def test_update_resource_share_errors():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.update_resource_share(
|
||||
resourceShareArn="arn:aws:ram:us-east-1:{}:resource-share/not-existing".format(
|
||||
ACCOUNT_ID
|
||||
),
|
||||
resourceShareArn=f"arn:aws:ram:us-east-1:{ACCOUNT_ID}:resource-share/not-existing",
|
||||
name="test-update",
|
||||
)
|
||||
ex = e.value
|
||||
@ -294,9 +282,7 @@ def test_update_resource_share_errors():
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("UnknownResourceException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"ResourceShare arn:aws:ram:us-east-1:{}:resource-share/not-existing could not be found.".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"ResourceShare arn:aws:ram:us-east-1:{ACCOUNT_ID}:resource-share/not-existing could not be found."
|
||||
)
|
||||
|
||||
|
||||
@ -330,18 +316,14 @@ def test_delete_resource_share_errors():
|
||||
# when
|
||||
with pytest.raises(ClientError) as e:
|
||||
client.delete_resource_share(
|
||||
resourceShareArn="arn:aws:ram:us-east-1:{}:resource-share/not-existing".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
resourceShareArn=f"arn:aws:ram:us-east-1:{ACCOUNT_ID}:resource-share/not-existing"
|
||||
)
|
||||
ex = e.value
|
||||
ex.operation_name.should.equal("DeleteResourceShare")
|
||||
ex.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
|
||||
ex.response["Error"]["Code"].should.contain("UnknownResourceException")
|
||||
ex.response["Error"]["Message"].should.equal(
|
||||
"ResourceShare arn:aws:ram:us-east-1:{}:resource-share/not-existing could not be found.".format(
|
||||
ACCOUNT_ID
|
||||
)
|
||||
f"ResourceShare arn:aws:ram:us-east-1:{ACCOUNT_ID}:resource-share/not-existing could not be found."
|
||||
)
|
||||
|
||||
|
||||
|
@ -15,8 +15,8 @@ class TestDBInstanceFilters(object):
|
||||
cls.mock.start()
|
||||
client = boto3.client("rds", region_name="us-west-2")
|
||||
for i in range(10):
|
||||
instance_identifier = "db-instance-{}".format(i)
|
||||
cluster_identifier = "db-cluster-{}".format(i)
|
||||
instance_identifier = f"db-instance-{i}"
|
||||
cluster_identifier = f"db-cluster-{i}"
|
||||
engine = "postgres" if (i % 3) else "mysql"
|
||||
client.create_db_instance(
|
||||
DBInstanceIdentifier=instance_identifier,
|
||||
@ -200,7 +200,7 @@ class TestDBSnapshotFilters(object):
|
||||
# We'll set up two instances (one postgres, one mysql)
|
||||
# with two snapshots each.
|
||||
for i in range(2):
|
||||
identifier = "db-instance-{}".format(i)
|
||||
identifier = f"db-instance-{i}"
|
||||
engine = "postgres" if i else "mysql"
|
||||
client.create_db_instance(
|
||||
DBInstanceIdentifier=identifier,
|
||||
@ -210,7 +210,7 @@ class TestDBSnapshotFilters(object):
|
||||
for j in range(2):
|
||||
client.create_db_snapshot(
|
||||
DBInstanceIdentifier=identifier,
|
||||
DBSnapshotIdentifier="{}-snapshot-{}".format(identifier, j),
|
||||
DBSnapshotIdentifier=f"{identifier}-snapshot-{j}",
|
||||
)
|
||||
cls.client = client
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff