AWSLambda - clean up tests (#4317)
This commit is contained in:
parent
afa7f40f1e
commit
d08ed937f3
@ -5,10 +5,15 @@ import zipfile
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import mock_cloudformation, mock_iam, mock_lambda, mock_s3, mock_sqs
|
||||
import pytest
|
||||
import re
|
||||
from string import Template
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
def random_stack_name():
|
||||
return str(uuid4())[0:6]
|
||||
|
||||
|
||||
def _process_lambda(func_str):
|
||||
zip_output = io.BytesIO()
|
||||
zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
|
||||
@ -74,6 +79,7 @@ def test_lambda_can_be_updated_by_cloudformation():
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
body2, stack = create_stack(cf, s3)
|
||||
stack_name = re.search(":stack/(.+)/", stack["StackId"]).group(1)
|
||||
created_fn_name = get_created_function_name(cf, stack)
|
||||
# Verify function has been created
|
||||
created_fn = lmbda.get_function(FunctionName=created_fn_name)
|
||||
@ -83,7 +89,7 @@ def test_lambda_can_be_updated_by_cloudformation():
|
||||
created_fn["Configuration"]["Runtime"].should.equal("python3.7")
|
||||
created_fn["Code"]["Location"].should.match("/test1.zip")
|
||||
# Update CF stack
|
||||
cf.update_stack(StackName="teststack", TemplateBody=body2)
|
||||
cf.update_stack(StackName=stack_name, TemplateBody=body2)
|
||||
updated_fn_name = get_created_function_name(cf, stack)
|
||||
# Verify function has been updated
|
||||
updated_fn = lmbda.get_function(FunctionName=updated_fn_name)
|
||||
@ -124,7 +130,7 @@ def test_event_source_mapping_create_from_cloudformation_json():
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
|
||||
queue = sqs.create_queue(QueueName="test-sqs-queue1")
|
||||
queue = sqs.create_queue(QueueName=str(uuid4())[0:6])
|
||||
|
||||
# Creates lambda
|
||||
_, lambda_stack = create_stack(cf, s3)
|
||||
@ -143,7 +149,7 @@ def test_event_source_mapping_create_from_cloudformation_json():
|
||||
}
|
||||
)
|
||||
|
||||
cf.create_stack(StackName="test-event-source", TemplateBody=template)
|
||||
cf.create_stack(StackName=random_stack_name(), TemplateBody=template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
|
||||
event_sources["EventSourceMappings"].should.have.length_of(1)
|
||||
@ -162,7 +168,7 @@ def test_event_source_mapping_delete_stack():
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
|
||||
queue = sqs.create_queue(QueueName="test-sqs-queue1")
|
||||
queue = sqs.create_queue(QueueName=str(uuid4())[0:6])
|
||||
|
||||
# Creates lambda
|
||||
_, lambda_stack = create_stack(cf, s3)
|
||||
@ -178,7 +184,7 @@ def test_event_source_mapping_delete_stack():
|
||||
}
|
||||
)
|
||||
|
||||
esm_stack = cf.create_stack(StackName="test-event-source", TemplateBody=template)
|
||||
esm_stack = cf.create_stack(StackName=random_stack_name(), TemplateBody=template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
|
||||
event_sources["EventSourceMappings"].should.have.length_of(1)
|
||||
@ -199,7 +205,7 @@ def test_event_source_mapping_update_from_cloudformation_json():
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
|
||||
queue = sqs.create_queue(QueueName="test-sqs-queue1")
|
||||
queue = sqs.create_queue(QueueName=str(uuid4())[0:6])
|
||||
|
||||
# Creates lambda
|
||||
_, lambda_stack = create_stack(cf, s3)
|
||||
@ -218,7 +224,8 @@ def test_event_source_mapping_update_from_cloudformation_json():
|
||||
}
|
||||
)
|
||||
|
||||
cf.create_stack(StackName="test-event-source", TemplateBody=original_template)
|
||||
stack_name = random_stack_name()
|
||||
cf.create_stack(StackName=stack_name, TemplateBody=original_template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
original_esm = event_sources["EventSourceMappings"][0]
|
||||
|
||||
@ -236,7 +243,7 @@ def test_event_source_mapping_update_from_cloudformation_json():
|
||||
}
|
||||
)
|
||||
|
||||
cf.update_stack(StackName="test-event-source", TemplateBody=new_template)
|
||||
cf.update_stack(StackName=stack_name, TemplateBody=new_template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
updated_esm = event_sources["EventSourceMappings"][0]
|
||||
|
||||
@ -254,7 +261,7 @@ def test_event_source_mapping_delete_from_cloudformation_json():
|
||||
cf = boto3.client("cloudformation", region_name="us-east-1")
|
||||
lmbda = boto3.client("lambda", region_name="us-east-1")
|
||||
|
||||
queue = sqs.create_queue(QueueName="test-sqs-queue1")
|
||||
queue = sqs.create_queue(QueueName=str(uuid4())[0:6])
|
||||
|
||||
# Creates lambda
|
||||
_, lambda_stack = create_stack(cf, s3)
|
||||
@ -273,7 +280,8 @@ def test_event_source_mapping_delete_from_cloudformation_json():
|
||||
}
|
||||
)
|
||||
|
||||
cf.create_stack(StackName="test-event-source", TemplateBody=original_template)
|
||||
stack_name = random_stack_name()
|
||||
cf.create_stack(StackName=stack_name, TemplateBody=original_template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
original_esm = event_sources["EventSourceMappings"][0]
|
||||
|
||||
@ -291,7 +299,7 @@ def test_event_source_mapping_delete_from_cloudformation_json():
|
||||
}
|
||||
)
|
||||
|
||||
cf.update_stack(StackName="test-event-source", TemplateBody=new_template)
|
||||
cf.update_stack(StackName=stack_name, TemplateBody=new_template)
|
||||
event_sources = lmbda.list_event_source_mappings(FunctionName=created_fn_name)
|
||||
|
||||
event_sources["EventSourceMappings"].should.have.length_of(1)
|
||||
@ -304,12 +312,13 @@ def test_event_source_mapping_delete_from_cloudformation_json():
|
||||
|
||||
def create_stack(cf, s3):
|
||||
bucket_name = str(uuid4())
|
||||
stack_name = random_stack_name()
|
||||
s3.create_bucket(Bucket=bucket_name)
|
||||
s3.put_object(Bucket=bucket_name, Key="test1.zip", Body=get_zip_file())
|
||||
s3.put_object(Bucket=bucket_name, Key="test2.zip", Body=get_zip_file())
|
||||
body1 = get_template(bucket_name, "1", "python3.7")
|
||||
body2 = get_template(bucket_name, "2", "python3.8")
|
||||
stack = cf.create_stack(StackName="teststack", TemplateBody=body1)
|
||||
stack = cf.create_stack(StackName=stack_name, TemplateBody=body1)
|
||||
return body2, stack
|
||||
|
||||
|
||||
@ -334,10 +343,12 @@ def get_role_arn():
|
||||
with mock_iam():
|
||||
iam = boto3.client("iam", region_name="us-west-2")
|
||||
try:
|
||||
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
|
||||
except ClientError:
|
||||
return iam.create_role(
|
||||
iam.create_role(
|
||||
RoleName="my-role",
|
||||
AssumeRolePolicyDocument="some policy",
|
||||
Path="/my-path/",
|
||||
)["Role"]["Arn"]
|
||||
)
|
||||
except ClientError:
|
||||
pass # Will fail second/third time - difficult to execute once with parallel tests
|
||||
|
||||
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
|
||||
|
File diff suppressed because it is too large
Load Diff
92
tests/test_awslambda/test_lambda_concurrency.py
Normal file
92
tests/test_awslambda/test_lambda_concurrency.py
Normal file
@ -0,0 +1,92 @@
|
||||
import boto3
|
||||
import pytest
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_lambda
|
||||
from uuid import uuid4
|
||||
from .utilities import get_role_name, get_test_zip_file1
|
||||
|
||||
_lambda_region = "us-west-2"
|
||||
boto3.setup_default_session(region_name=_lambda_region)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
|
||||
@mock_lambda
|
||||
def test_put_function_concurrency(key):
|
||||
expected_concurrency = 15
|
||||
function_name = str(uuid4())[0:6]
|
||||
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
f = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python3.8",
|
||||
Role=(get_role_name()),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
name_or_arn = f[key]
|
||||
result = conn.put_function_concurrency(
|
||||
FunctionName=name_or_arn, ReservedConcurrentExecutions=expected_concurrency
|
||||
)
|
||||
|
||||
result["ReservedConcurrentExecutions"].should.equal(expected_concurrency)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
|
||||
@mock_lambda
|
||||
def test_delete_function_concurrency(key):
|
||||
function_name = str(uuid4())[0:6]
|
||||
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
f = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python3.8",
|
||||
Role=(get_role_name()),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
name_or_arn = f[key]
|
||||
conn.put_function_concurrency(
|
||||
FunctionName=name_or_arn, ReservedConcurrentExecutions=15
|
||||
)
|
||||
|
||||
conn.delete_function_concurrency(FunctionName=name_or_arn)
|
||||
result = conn.get_function(FunctionName=function_name)
|
||||
|
||||
result.doesnt.have.key("Concurrency")
|
||||
|
||||
|
||||
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
|
||||
@mock_lambda
|
||||
def test_get_function_concurrency(key):
|
||||
expected_concurrency = 15
|
||||
function_name = str(uuid4())[0:6]
|
||||
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
f = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python3.8",
|
||||
Role=(get_role_name()),
|
||||
Handler="lambda_function.handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
name_or_arn = f[key]
|
||||
conn.put_function_concurrency(
|
||||
FunctionName=name_or_arn, ReservedConcurrentExecutions=expected_concurrency
|
||||
)
|
||||
|
||||
result = conn.get_function_concurrency(FunctionName=name_or_arn)
|
||||
|
||||
result["ReservedConcurrentExecutions"].should.equal(expected_concurrency)
|
477
tests/test_awslambda/test_lambda_eventsourcemapping.py
Normal file
477
tests/test_awslambda/test_lambda_eventsourcemapping.py
Normal file
@ -0,0 +1,477 @@
|
||||
import botocore.client
|
||||
import boto3
|
||||
import json
|
||||
import pytest
|
||||
import time
|
||||
import sure # noqa
|
||||
import uuid
|
||||
|
||||
from moto import (
|
||||
mock_dynamodb2,
|
||||
mock_lambda,
|
||||
mock_logs,
|
||||
mock_sns,
|
||||
mock_sqs,
|
||||
)
|
||||
from uuid import uuid4
|
||||
from .utilities import (
|
||||
get_role_name,
|
||||
get_test_zip_file3,
|
||||
wait_for_log_msg,
|
||||
get_test_zip_file_error,
|
||||
)
|
||||
|
||||
_lambda_region = "us-west-2"
|
||||
boto3.setup_default_session(region_name=_lambda_region)
|
||||
|
||||
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_create_event_source_mapping():
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
|
||||
)
|
||||
|
||||
assert response["EventSourceArn"] == queue.attributes["QueueArn"]
|
||||
assert response["FunctionArn"] == func["FunctionArn"]
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_invoke_function_from_sqs(key):
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
name_or_arn = func[key]
|
||||
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=name_or_arn
|
||||
)
|
||||
|
||||
assert response["EventSourceArn"] == queue.attributes["QueueArn"]
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
sqs_client = boto3.client("sqs", region_name="us-east-1")
|
||||
sqs_client.send_message(QueueUrl=queue.url, MessageBody="test")
|
||||
|
||||
expected_msg = "get_test_zip_file3 success"
|
||||
log_group = f"/aws/lambda/{function_name}"
|
||||
msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group)
|
||||
|
||||
assert msg_showed_up, (
|
||||
expected_msg
|
||||
+ " was not found after sending an SQS message. All logs: "
|
||||
+ str(all_logs)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_dynamodb2
|
||||
def test_invoke_function_from_dynamodb_put():
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
table_name = str(uuid4())[0:6] + "_table"
|
||||
table = dynamodb.create_table(
|
||||
TableName=table_name,
|
||||
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
|
||||
StreamSpecification={
|
||||
"StreamEnabled": True,
|
||||
"StreamViewType": "NEW_AND_OLD_IMAGES",
|
||||
},
|
||||
)
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
function_name = str(uuid4())[0:6]
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function executed after a DynamoDB table is updated",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=table["TableDescription"]["LatestStreamArn"],
|
||||
FunctionName=func["FunctionArn"],
|
||||
)
|
||||
|
||||
assert response["EventSourceArn"] == table["TableDescription"]["LatestStreamArn"]
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
dynamodb.put_item(TableName=table_name, Item={"id": {"S": "item 1"}})
|
||||
|
||||
expected_msg = "get_test_zip_file3 success"
|
||||
log_group = f"/aws/lambda/{function_name}"
|
||||
msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group)
|
||||
|
||||
assert msg_showed_up, (
|
||||
expected_msg + " was not found after a DDB insert. All logs: " + str(all_logs)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_dynamodb2
|
||||
def test_invoke_function_from_dynamodb_update():
|
||||
dynamodb = boto3.client("dynamodb", region_name="us-east-1")
|
||||
table_name = str(uuid4())[0:6] + "_table"
|
||||
table = dynamodb.create_table(
|
||||
TableName=table_name,
|
||||
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
|
||||
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
|
||||
StreamSpecification={
|
||||
"StreamEnabled": True,
|
||||
"StreamViewType": "NEW_AND_OLD_IMAGES",
|
||||
},
|
||||
)
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
function_name = str(uuid4())[0:6]
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function executed after a DynamoDB table is updated",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
conn.create_event_source_mapping(
|
||||
EventSourceArn=table["TableDescription"]["LatestStreamArn"],
|
||||
FunctionName=func["FunctionArn"],
|
||||
)
|
||||
|
||||
dynamodb.put_item(TableName=table_name, Item={"id": {"S": "item 1"}})
|
||||
log_group = f"/aws/lambda/{function_name}"
|
||||
expected_msg = "get_test_zip_file3 success"
|
||||
msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group)
|
||||
assert "Nr_of_records(1)" in all_logs, "Only one item should be inserted"
|
||||
|
||||
dynamodb.update_item(
|
||||
TableName=table_name,
|
||||
Key={"id": {"S": "item 1"}},
|
||||
UpdateExpression="set #attr = :val",
|
||||
ExpressionAttributeNames={"#attr": "new_attr"},
|
||||
ExpressionAttributeValues={":val": {"S": "new_val"}},
|
||||
)
|
||||
msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group)
|
||||
|
||||
assert msg_showed_up, (
|
||||
expected_msg + " was not found after updating DDB. All logs: " + str(all_logs)
|
||||
)
|
||||
assert "Nr_of_records(1)" in all_logs, "Only one item should be updated"
|
||||
assert (
|
||||
"Nr_of_records(2)" not in all_logs
|
||||
), "The inserted item should not show up again"
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_invoke_function_from_sqs_exception():
|
||||
function_name = str(uuid4())[0:6]
|
||||
logs_conn = boto3.client("logs", region_name="us-east-1")
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file_error()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
|
||||
)
|
||||
|
||||
assert response["EventSourceArn"] == queue.attributes["QueueArn"]
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
entries = []
|
||||
for i in range(3):
|
||||
body = {"uuid": str(uuid.uuid4()), "test": "test_{}".format(i)}
|
||||
entry = {"Id": str(i), "MessageBody": json.dumps(body)}
|
||||
entries.append(entry)
|
||||
|
||||
queue.send_messages(Entries=entries)
|
||||
|
||||
start = time.time()
|
||||
while (time.time() - start) < 30:
|
||||
result = logs_conn.describe_log_streams(
|
||||
logGroupName=f"/aws/lambda/{function_name}"
|
||||
)
|
||||
log_streams = result.get("logStreams")
|
||||
if not log_streams:
|
||||
time.sleep(1)
|
||||
continue
|
||||
assert len(log_streams) >= 1
|
||||
|
||||
result = logs_conn.get_log_events(
|
||||
logGroupName=f"/aws/lambda/{function_name}",
|
||||
logStreamName=log_streams[0]["logStreamName"],
|
||||
)
|
||||
for event in result.get("events"):
|
||||
if "I failed!" in event["message"]:
|
||||
messages = queue.receive_messages(MaxNumberOfMessages=10)
|
||||
# Verify messages are still visible and unprocessed
|
||||
assert len(messages) == 3
|
||||
return
|
||||
time.sleep(1)
|
||||
|
||||
assert False, "Test Failed"
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_logs
|
||||
@mock_sns
|
||||
@mock_lambda
|
||||
def test_invoke_function_from_sns():
|
||||
logs_conn = boto3.client("logs", region_name=_lambda_region)
|
||||
sns_conn = boto3.client("sns", region_name=_lambda_region)
|
||||
sns_conn.create_topic(Name="some-topic")
|
||||
topics_json = sns_conn.list_topics()
|
||||
topics = topics_json["Topics"]
|
||||
topic_arn = topics[0]["TopicArn"]
|
||||
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
function_name = str(uuid4())[0:6]
|
||||
result = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
|
||||
sns_conn.subscribe(
|
||||
TopicArn=topic_arn, Protocol="lambda", Endpoint=result["FunctionArn"]
|
||||
)
|
||||
|
||||
result = sns_conn.publish(TopicArn=topic_arn, Message=json.dumps({}))
|
||||
|
||||
start = time.time()
|
||||
events = []
|
||||
while (time.time() - start) < 10:
|
||||
result = logs_conn.describe_log_streams(
|
||||
logGroupName=f"/aws/lambda/{function_name}"
|
||||
)
|
||||
log_streams = result.get("logStreams")
|
||||
if not log_streams:
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
assert len(log_streams) == 1
|
||||
result = logs_conn.get_log_events(
|
||||
logGroupName=f"/aws/lambda/{function_name}",
|
||||
logStreamName=log_streams[0]["logStreamName"],
|
||||
)
|
||||
events = result.get("events")
|
||||
for event in events:
|
||||
if event["message"] == "get_test_zip_file3 success":
|
||||
return
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
assert False, "Expected message not found in logs:" + str(events)
|
||||
|
||||
|
||||
@mock_logs
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_list_event_source_mappings():
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
|
||||
)
|
||||
mappings = conn.list_event_source_mappings(EventSourceArn="123")
|
||||
mappings["EventSourceMappings"].should.have.length_of(0)
|
||||
|
||||
mappings = conn.list_event_source_mappings(
|
||||
EventSourceArn=queue.attributes["QueueArn"]
|
||||
)
|
||||
assert len(mappings["EventSourceMappings"]) >= 1
|
||||
assert mappings["EventSourceMappings"][0]["UUID"] == response["UUID"]
|
||||
assert mappings["EventSourceMappings"][0]["FunctionArn"] == func["FunctionArn"]
|
||||
|
||||
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_get_event_source_mapping():
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
|
||||
)
|
||||
mapping = conn.get_event_source_mapping(UUID=response["UUID"])
|
||||
assert mapping["UUID"] == response["UUID"]
|
||||
assert mapping["FunctionArn"] == func["FunctionArn"]
|
||||
|
||||
conn.get_event_source_mapping.when.called_with(UUID="1").should.throw(
|
||||
botocore.client.ClientError
|
||||
)
|
||||
|
||||
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_update_event_source_mapping():
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func1 = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
func2 = conn.create_function(
|
||||
FunctionName="testFunction2",
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func1["FunctionArn"]
|
||||
)
|
||||
assert response["FunctionArn"] == func1["FunctionArn"]
|
||||
assert response["BatchSize"] == 10
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
mapping = conn.update_event_source_mapping(
|
||||
UUID=response["UUID"], Enabled=False, BatchSize=2, FunctionName="testFunction2"
|
||||
)
|
||||
assert mapping["UUID"] == response["UUID"]
|
||||
assert mapping["FunctionArn"] == func2["FunctionArn"]
|
||||
assert mapping["State"] == "Disabled"
|
||||
assert mapping["BatchSize"] == 2
|
||||
|
||||
|
||||
@mock_lambda
|
||||
@mock_sqs
|
||||
def test_delete_event_source_mapping():
|
||||
function_name = str(uuid4())[0:6]
|
||||
sqs = boto3.resource("sqs", region_name="us-east-1")
|
||||
queue = sqs.create_queue(QueueName=f"{function_name}_queue")
|
||||
|
||||
conn = boto3.client("lambda", region_name="us-east-1")
|
||||
func1 = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file3()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
response = conn.create_event_source_mapping(
|
||||
EventSourceArn=queue.attributes["QueueArn"], FunctionName=func1["FunctionArn"]
|
||||
)
|
||||
assert response["FunctionArn"] == func1["FunctionArn"]
|
||||
assert response["BatchSize"] == 10
|
||||
assert response["State"] == "Enabled"
|
||||
|
||||
response = conn.delete_event_source_mapping(UUID=response["UUID"])
|
||||
|
||||
assert response["State"] == "Deleting"
|
||||
conn.get_event_source_mapping.when.called_with(UUID=response["UUID"]).should.throw(
|
||||
botocore.client.ClientError
|
||||
)
|
296
tests/test_awslambda/test_lambda_invoke.py
Normal file
296
tests/test_awslambda/test_lambda_invoke.py
Normal file
@ -0,0 +1,296 @@
|
||||
import base64
|
||||
import botocore.client
|
||||
import boto3
|
||||
import io
|
||||
import json
|
||||
import pytest
|
||||
import sure # noqa
|
||||
import zipfile
|
||||
|
||||
from moto import (
|
||||
mock_lambda,
|
||||
mock_ec2,
|
||||
settings,
|
||||
)
|
||||
from uuid import uuid4
|
||||
from .utilities import (
|
||||
get_role_name,
|
||||
get_test_zip_file_error,
|
||||
get_test_zip_file1,
|
||||
get_zip_with_multiple_files,
|
||||
get_test_zip_file2,
|
||||
)
|
||||
|
||||
_lambda_region = "us-west-2"
|
||||
boto3.setup_default_session(region_name=_lambda_region)
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@mock_lambda
|
||||
def test_invoke_function_that_throws_error():
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
function_name = str(uuid4())[0:6]
|
||||
conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file_error()},
|
||||
)
|
||||
|
||||
failure_response = conn.invoke(
|
||||
FunctionName=function_name, Payload=json.dumps({}), LogType="Tail"
|
||||
)
|
||||
|
||||
failure_response.should.have.key("FunctionError").being.equal("Handled")
|
||||
|
||||
payload = failure_response["Payload"].read().decode("utf-8")
|
||||
payload = json.loads(payload)
|
||||
payload["errorType"].should.equal("Exception")
|
||||
payload["errorMessage"].should.equal("I failed!")
|
||||
payload.should.have.key("stackTrace")
|
||||
|
||||
logs = base64.b64decode(failure_response["LogResult"]).decode("utf-8")
|
||||
logs.should.contain("START RequestId:")
|
||||
logs.should.contain("I failed!: Exception")
|
||||
logs.should.contain("Traceback (most recent call last):")
|
||||
logs.should.contain("END RequestId:")
|
||||
|
||||
|
||||
@pytest.mark.network
|
||||
@pytest.mark.parametrize("invocation_type", [None, "RequestResponse"])
|
||||
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
|
||||
@mock_lambda
|
||||
def test_invoke_requestresponse_function(invocation_type, key):
|
||||
conn = boto3.client("lambda", _lambda_region)
|
||||
function_name = str(uuid4())[0:6]
|
||||
fxn = conn.create_function(
|
||||
FunctionName=function_name,
|
||||
Runtime="python2.7",
|
||||
Role=get_role_name(),
|
||||
Handler="lambda_function.lambda_handler",
|
||||
Code={"ZipFile": get_test_zip_file1()},
|
||||
Description="test lambda function",
|
||||
Timeout=3,
|
||||
MemorySize=128,
|
||||
Publish=True,
|
||||
)
|
||||
name_or_arn = fxn[key]
|
||||
|
||||
# Only add invocation-type keyword-argument when provided, otherwise the request
|
||||
# fails to be validated
|
||||
kw = {}
|
||||
if invocation_type:
|
||||
kw["InvocationType"] = invocation_type
|
||||
|
||||
in_data = {"msg": "So long and thanks for all the fish"}
|
||||
success_result = conn.invoke(
|
||||
FunctionName=name_or_arn, Payload=json.dumps(in_data), LogType="Tail", **kw
|
||||
)
|
||||
|
||||
if "FunctionError" in success_result:
|
||||
assert False, success_result["Payload"].read().decode("utf-8")
|
||||
|
||||
success_result["StatusCode"].should.equal(200)
|
||||
success_result["ResponseMetadata"]["HTTPHeaders"]["content-type"].should.equal(
|
||||
"application/json"
|
||||
)
|
||||
logs = base64.b64decode(success_result["LogResult"]).decode("utf-8")
|
||||
|
||||
logs.should.contain("START RequestId:")
|
||||
logs.should.contain("custom log event")
|
||||
logs.should.contain("END RequestId:")
|
||||
|
||||
payload = success_result["Payload"].read().decode("utf-8")
|
||||
json.loads(payload).should.equal(in_data)
|
||||
|
||||
# Logs should not be returned by default, only when the LogType-param is supplied
|
||||
success_result = conn.invoke(
|
||||
FunctionName=name_or_arn, Payload=json.dumps(in_data), **kw
|
||||
)
|
||||
|
||||
success_result["StatusCode"].should.equal(200)
|
||||
success_result["ResponseMetadata"]["HTTPHeaders"]["content-type"].should.equal(
|
||||
"application/json"
|
||||
)
|
||||
assert "LogResult" not in success_result
|
||||
|
||||
|
||||
@pytest.mark.network
@mock_lambda
def test_invoke_event_function():
    """Invoking with InvocationType=Event returns 202 and echoes the payload."""
    client = boto3.client("lambda", _lambda_region)
    fn_name = str(uuid4())[0:6]
    client.create_function(
        FunctionName=fn_name,
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Unknown function names must be rejected outright
    client.invoke.when.called_with(
        FunctionName="notAFunction", InvocationType="Event", Payload="{}"
    ).should.throw(botocore.client.ClientError)

    payload_in = {"msg": "So long and thanks for all the fish"}
    resp = client.invoke(
        FunctionName=fn_name, InvocationType="Event", Payload=json.dumps(payload_in)
    )
    resp["StatusCode"].should.equal(202)
    json.loads(resp["Payload"].read().decode("utf-8")).should.equal(payload_in)
|
||||
|
||||
|
||||
@pytest.mark.network
@mock_lambda
def test_invoke_function_with_multiple_files_in_zip():
    """A handler that imports a second module from the same zip can be invoked."""
    client = boto3.client("lambda", _lambda_region)
    fn_name = str(uuid4())[0:6]
    client.create_function(
        FunctionName=fn_name,
        Runtime="python3.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_zip_with_multiple_files()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    payload_in = {"msg": "So long and thanks for: "}
    resp = client.invoke(
        FunctionName=fn_name, InvocationType="Event", Payload=json.dumps(payload_in)
    )
    # util_function() from the second file in the zip appends "stuff" to the message
    json.loads(resp["Payload"].read().decode("utf-8")).should.equal(
        {"msg": "So long and thanks for: stuff"}
    )
|
||||
|
||||
|
||||
@pytest.mark.network
@mock_lambda
def test_invoke_dryrun_function():
    """A DryRun invocation validates the request and returns 204 with no payload."""
    client = boto3.client("lambda", _lambda_region)
    fn_name = str(uuid4())[0:6]
    client.create_function(
        FunctionName=fn_name,
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Unknown function names must be rejected outright
    client.invoke.when.called_with(
        FunctionName="notAFunction", InvocationType="Event", Payload="{}"
    ).should.throw(botocore.client.ClientError)

    payload_in = {"msg": "So long and thanks for all the fish"}
    resp = client.invoke(
        FunctionName=fn_name,
        InvocationType="DryRun",
        Payload=json.dumps(payload_in),
    )
    resp["StatusCode"].should.equal(204)
|
||||
|
||||
|
||||
if settings.TEST_SERVER_MODE:

    @mock_ec2
    @mock_lambda
    def test_invoke_function_get_ec2_volume():
        """The invoked function can reach the (mocked) EC2 API and describe a volume."""
        ec2 = boto3.resource("ec2", _lambda_region)
        created = ec2.create_volume(Size=99, AvailabilityZone=_lambda_region)
        volume = ec2.Volume(created.id)

        client = boto3.client("lambda", _lambda_region)
        fn_name = str(uuid4())[0:6]
        client.create_function(
            FunctionName=fn_name,
            Runtime="python3.7",
            Role=get_role_name(),
            Handler="lambda_function.lambda_handler",
            Code={"ZipFile": get_test_zip_file2()},
            Description="test lambda function",
            Timeout=3,
            MemorySize=128,
            Publish=True,
        )

        resp = client.invoke(
            FunctionName=fn_name,
            InvocationType="RequestResponse",
            Payload=json.dumps({"volume_id": volume.id}),
        )
        resp["StatusCode"].should.equal(200)
        actual = json.loads(resp["Payload"].read().decode("utf-8"))
        # The handler in get_test_zip_file2 echoes back the volume's attributes
        actual.should.equal({"id": volume.id, "state": volume.state, "size": volume.size})
|
||||
|
||||
|
||||
@pytest.mark.network
@mock_lambda
def test_invoke_lambda_error():
    """Invoking a handler that raises surfaces FunctionError == "Handled"."""
    lambda_fx = """
def lambda_handler(event, context):
    raise Exception('failsauce')
"""
    zip_output = io.BytesIO()
    zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
    zip_file.writestr("lambda_function.py", lambda_fx)
    zip_file.close()
    zip_output.seek(0)

    client = boto3.client("lambda", region_name="us-east-1")
    # Random name (like the other tests in this file) so parallel / server-mode
    # runs don't collide on the hard-coded "test-lambda-fx" in a shared backend.
    function_name = str(uuid4())[0:6]
    client.create_function(
        FunctionName=function_name,
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
        Code={"ZipFile": zip_output.read()},
    )

    result = client.invoke(
        FunctionName=function_name, InvocationType="RequestResponse", LogType="Tail"
    )

    assert "FunctionError" in result
    assert result["FunctionError"] == "Handled"
|
||||
|
||||
|
||||
@pytest.mark.network
@pytest.mark.parametrize("key", ["FunctionName", "FunctionArn"])
@mock_lambda
def test_invoke_async_function(key):
    """invoke_async accepts either the function name or its ARN and returns 202."""
    client = boto3.client("lambda", _lambda_region)
    fn_name = str(uuid4())[0:6]
    created = client.create_function(
        FunctionName=fn_name,
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    result = client.invoke_async(
        FunctionName=created[key], InvokeArgs=json.dumps({"test": "event"})
    )
    result["Status"].should.equal(202)
|
125
tests/test_awslambda/test_lambda_layers.py
Normal file
125
tests/test_awslambda/test_lambda_layers.py
Normal file
@ -0,0 +1,125 @@
|
||||
import boto3
|
||||
import pytest
|
||||
import sure # noqa
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
from freezegun import freeze_time
|
||||
from moto import mock_lambda, mock_s3
|
||||
from moto.core.exceptions import RESTError
|
||||
from moto.sts.models import ACCOUNT_ID
|
||||
from uuid import uuid4
|
||||
|
||||
from .utilities import get_role_name, get_test_zip_file1
|
||||
|
||||
_lambda_region = "us-west-2"
|
||||
boto3.setup_default_session(region_name=_lambda_region)
|
||||
|
||||
|
||||
@mock_lambda
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_get_lambda_layers():
    """End-to-end layer lifecycle: publish versions, list them, attach to a function.

    NOTE: the assertions depend on publish order — the first successful
    publish_layer_version becomes Version 1, the second Version 2.
    """
    # S3 bucket holding a zip so a layer can also be published from S3.
    bucket_name = str(uuid4())
    s3_conn = boto3.client("s3", _lambda_region)
    s3_conn.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": _lambda_region},
    )

    zip_content = get_test_zip_file1()
    s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
    conn = boto3.client("lambda", _lambda_region)
    layer_name = str(uuid4())[0:6]

    # Publishing with an empty Content dict must fail (in-process moto raises
    # RESTError; server mode surfaces it as a ClientError).
    with pytest.raises((RESTError, ClientError)):
        conn.publish_layer_version(
            LayerName=layer_name,
            Content={},
            CompatibleRuntimes=["python3.6"],
            LicenseInfo="MIT",
        )
    # Version 1: published from inline zip bytes.
    conn.publish_layer_version(
        LayerName=layer_name,
        Content={"ZipFile": get_test_zip_file1()},
        CompatibleRuntimes=["python3.6"],
        LicenseInfo="MIT",
    )
    # Version 2: published from the S3 object uploaded above.
    conn.publish_layer_version(
        LayerName=layer_name,
        Content={"S3Bucket": bucket_name, "S3Key": "test.zip"},
        CompatibleRuntimes=["python3.6"],
        LicenseInfo="MIT",
    )

    result = conn.list_layer_versions(LayerName=layer_name)

    # CreatedDate is timestamp-dependent; drop it before the exact comparison.
    for version in result["LayerVersions"]:
        version.pop("CreatedDate")
    result["LayerVersions"].sort(key=lambda x: x["Version"])
    expected_arn = "arn:aws:lambda:{0}:{1}:layer:{2}:".format(
        _lambda_region, ACCOUNT_ID, layer_name
    )
    result["LayerVersions"].should.equal(
        [
            {
                "Version": 1,
                "LayerVersionArn": expected_arn + "1",
                "CompatibleRuntimes": ["python3.6"],
                "Description": "",
                "LicenseInfo": "MIT",
            },
            {
                "Version": 2,
                "LayerVersionArn": expected_arn + "2",
                "CompatibleRuntimes": ["python3.6"],
                "Description": "",
                "LicenseInfo": "MIT",
            },
        ]
    )

    # A function created with layer version 1 reports it in its configuration.
    function_name = str(uuid4())[0:6]
    conn.create_function(
        FunctionName=function_name,
        Runtime="python2.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"S3Bucket": bucket_name, "S3Key": "test.zip"},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
        Environment={"Variables": {"test_variable": "test_value"}},
        Layers=[(expected_arn + "1")],
    )

    result = conn.get_function_configuration(FunctionName=function_name)
    result["Layers"].should.equal(
        [{"Arn": (expected_arn + "1"), "CodeSize": len(zip_content)}]
    )
    # Swapping to layer version 2 is reflected immediately.
    result = conn.update_function_configuration(
        FunctionName=function_name, Layers=[(expected_arn + "2")]
    )
    result["Layers"].should.equal(
        [{"Arn": (expected_arn + "2"), "CodeSize": len(zip_content)}]
    )

    # Test get layer versions for non existant layer
    result = conn.list_layer_versions(LayerName=f"{layer_name}2")
    result["LayerVersions"].should.equal([])

    # Test create function with non existant layer version
    with pytest.raises((ValueError, ClientError)):
        conn.create_function(
            FunctionName=function_name,
            Runtime="python2.7",
            Role=get_role_name(),
            Handler="lambda_function.lambda_handler",
            Code={"S3Bucket": bucket_name, "S3Key": "test.zip"},
            Description="test lambda function",
            Timeout=3,
            MemorySize=128,
            Publish=True,
            Environment={"Variables": {"test_variable": "test_value"}},
            Layers=[(expected_arn + "3")],
        )
|
145
tests/test_awslambda/utilities.py
Normal file
145
tests/test_awslambda/utilities.py
Normal file
@ -0,0 +1,145 @@
|
||||
import boto3
|
||||
import io
|
||||
import pytest
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import settings, mock_iam
|
||||
from uuid import uuid4
|
||||
|
||||
_lambda_region = "us-west-2"
|
||||
|
||||
|
||||
def _process_lambda(func_str):
    """Package *func_str* as ``lambda_function.py`` inside an in-memory zip.

    Returns the raw archive bytes, ready for a Lambda ``Code={"ZipFile": ...}``.
    """
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as archive:
        archive.writestr("lambda_function.py", func_str)
    buf.seek(0)
    return buf.read()
|
||||
|
||||
|
||||
def get_test_zip_file1():
    """Zip bytes for a handler that prints a marker line and echoes its event."""
    source = """
def lambda_handler(event, context):
    print("custom log event")
    return event
"""
    return _process_lambda(source)
|
||||
|
||||
|
||||
def get_test_zip_file2():
    """Zip bytes for a handler that looks up an EC2 volume and returns its details."""
    # Point the handler's boto3 client at the moto server when running in
    # server mode, otherwise at the regular (mocked) EC2 endpoint.
    endpoint = (
        "motoserver:5000"
        if settings.TEST_SERVER_MODE
        else "ec2.us-west-2.amazonaws.com"
    )
    func_str = """
import boto3

def lambda_handler(event, context):
    ec2 = boto3.resource('ec2', region_name='us-west-2', endpoint_url='http://{base_url}')

    volume_id = event.get('volume_id')
    vol = ec2.Volume(volume_id)

    return {{'id': vol.id, 'state': vol.state, 'size': vol.size}}
""".format(base_url=endpoint)
    return _process_lambda(func_str)
|
||||
|
||||
|
||||
def get_test_zip_file3():
    """Zip bytes for a handler that logs the number of event records it received."""
    source = """
def lambda_handler(event, context):
    print("Nr_of_records("+str(len(event['Records']))+")")
    print("get_test_zip_file3 success")
    return event
"""
    return _process_lambda(source)
|
||||
|
||||
|
||||
def get_test_zip_file_error():
    """Zip bytes for a handler that always raises, for error-path tests."""
    source = """
def lambda_handler(event, context):
    raise Exception('I failed!')
"""
    return _process_lambda(source)
|
||||
|
||||
|
||||
def get_zip_with_multiple_files():
    """Zip bytes containing a handler plus a second module it imports.

    ``lambda_function.py`` imports ``util_function`` from ``utilities.py`` and
    appends its return value to ``event["msg"]`` — used to verify that
    multi-file deployment packages are unpacked correctly.
    """
    pfunc = """
from utilities import util_function
def lambda_handler(event, context):
    x = util_function()
    event["msg"] = event["msg"] + x
    return event
"""
    ufunc = """
def util_function():
    return "stuff"
"""
    zip_output = io.BytesIO()
    # Write both members in one pass with a context manager; the original
    # opened and closed the archive twice in append mode for the same result.
    with zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr("lambda_function.py", pfunc)
        zip_file.writestr("utilities.py", ufunc)
    zip_output.seek(0)
    return zip_output.read()
|
||||
|
||||
|
||||
def create_invalid_lambda(role):
    """Attempt to create a function using *role* and return the raised ClientError."""
    client = boto3.client("lambda", _lambda_region)
    function_name = str(uuid4())[0:6]
    with pytest.raises(ClientError) as err:
        client.create_function(
            FunctionName=function_name,
            Runtime="python2.7",
            Role=role,
            Handler="lambda_function.handler",
            Code={"ZipFile": get_test_zip_file1()},
            Description="test lambda function",
            Timeout=3,
            MemorySize=128,
            Publish=True,
        )
    return err
|
||||
|
||||
|
||||
def get_role_name():
    """Return the ARN of the shared test IAM role, creating it on first use."""
    with mock_iam():
        iam = boto3.client("iam", region_name=_lambda_region)
        try:
            # Reuse the role if an earlier test already created it.
            return iam.get_role(RoleName="my-role")["Role"]["Arn"]
        except ClientError:
            created = iam.create_role(
                RoleName="my-role",
                AssumeRolePolicyDocument="some policy",
                Path="/my-path/",
            )
            return created["Role"]["Arn"]
|
||||
|
||||
|
||||
def wait_for_log_msg(expected_msg, log_group):
    """Poll CloudWatch Logs for up to 30 seconds until *expected_msg* appears.

    Returns a ``(found, messages)`` tuple where ``messages`` is every log
    message collected so far (useful for diagnostics when ``found`` is False).
    """
    client = boto3.client("logs", region_name="us-east-1")
    messages = []
    deadline = time.time() + 30
    while time.time() < deadline:
        streams = client.describe_log_streams(logGroupName=log_group).get("logStreams")
        if not streams:
            # Log group exists but nothing has been written yet; retry.
            time.sleep(1)
            continue

        for stream in streams:
            events = client.get_log_events(
                logGroupName=log_group, logStreamName=stream["logStreamName"],
            )
            messages.extend(event["message"] for event in events.get("events"))
        if expected_msg in messages:
            return True, messages
        time.sleep(1)
    return False, messages
|
@ -648,7 +648,7 @@ def test_rotate_secret_rotation_period_too_long():
|
||||
|
||||
|
||||
def get_rotation_zip_file():
|
||||
from tests.test_awslambda.test_lambda import _process_lambda
|
||||
from tests.test_awslambda.utilities import _process_lambda
|
||||
|
||||
func_str = """
|
||||
import boto3
|
||||
@ -723,7 +723,7 @@ if settings.TEST_SERVER_MODE:
|
||||
@mock_lambda
|
||||
@mock_secretsmanager
|
||||
def test_rotate_secret_using_lambda():
|
||||
from tests.test_awslambda.test_lambda import get_role_name
|
||||
from tests.test_awslambda.utilities import get_role_name
|
||||
|
||||
# Passing a `RotationLambdaARN` value to `rotate_secret` should invoke lambda
|
||||
lambda_conn = boto3.client(
|
||||
|
Loading…
Reference in New Issue
Block a user