Remove unnecessary dependencies EC2/SQS (#4094)

* Add missing dependencies for EFS
Bert Blommers 2021-07-29 06:38:16 +01:00 committed by GitHub
parent 6b4032a9a8
commit 11883a1fda
11 changed files with 105 additions and 125 deletions

.github/workflows/dependency_test.yml (new file)

@@ -0,0 +1,18 @@
+name: DependencyTest
+on: workflow_dispatch
+
+jobs:
+  runtest:
+    name: Run Dependency Test
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ 3.8 ]
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v2
+      - name: Run test
+        run: |
+          scripts/dependency_test.sh
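
Note: `on: workflow_dispatch` means this workflow never fires automatically; it has to be started from the Actions tab or through the REST API. A minimal sketch of the API call, assuming the main moto repository path and branch (token, owner/repo, and ref are placeholders):

import requests

# POST /repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches
resp = requests.post(
    "https://api.github.com/repos/spulec/moto/actions/workflows/dependency_test.yml/dispatches",
    headers={
        "Authorization": "token <YOUR_GITHUB_TOKEN>",  # placeholder
        "Accept": "application/vnd.github.v3+json",
    },
    json={"ref": "master"},  # branch to run against; adjust as needed
)
resp.raise_for_status()  # GitHub answers 204 No Content on success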

moto/sns/models.py

@@ -16,7 +16,6 @@ from moto.core.utils import (
     camelcase_to_underscores,
 )
 from moto.sqs import sqs_backends
-from moto.awslambda import lambda_backends

 from .exceptions import (
     SNSNotFoundError,
@@ -211,6 +210,8 @@ class Subscription(BaseModel):
         else:
             assert False

+        from moto.awslambda import lambda_backends
+
         lambda_backends[region].send_sns_message(
             function_name, message, subject=subject, qualifier=qualifier
         )

moto/sqs/models.py

@@ -498,7 +498,6 @@ class Queue(CloudFormationModel):
             return

         self._messages.append(message)
-        from moto.awslambda import lambda_backends

         for arn, esm in self.lambda_event_source_mappings.items():
             backend = sqs_backends[self.region]
@@ -516,6 +515,8 @@
                     self.visibility_timeout,
                 )

+            from moto.awslambda import lambda_backends
+
             result = lambda_backends[self.region].send_sqs_batch(
                 arn, messages, self.queue_arn
             )

scripts/dependency_test.sh

@@ -33,7 +33,7 @@ valid_service() {
  # Verify whether this is a valid service
  # We'll ignore metadata folders, and folders that test generic Moto behaviour
  # We'll also ignore CloudFormation, as it will always depend on other services
-  local ignore_moto_folders="core instance_metadata __pycache__ templates cloudformation"
+  local ignore_moto_folders="core instance_metadata __pycache__ templates cloudformation utilities"
  if echo $ignore_moto_folders | grep -q "$1"; then
    return 1
  else
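
The new `utilities` entry keeps the shared-helpers folder out of the per-service test matrix. For readability, the folder filter can be restated in Python (a hypothetical re-statement of the shell logic, not code from the repository; the shell version uses a grep substring match where exact membership is the intent):

# Folders under moto/ that are not standalone services and therefore
# should not get their own isolated dependency-test run.
IGNORE_MOTO_FOLDERS = {
    "core", "instance_metadata", "__pycache__",
    "templates", "cloudformation", "utilities",
}

def valid_service(folder: str) -> bool:
    # CloudFormation is excluded because it always depends on other services.
    return folder not in IGNORE_MOTO_FOLDERS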

setup.py

@@ -76,17 +76,20 @@ extras_per_service = {
     "batch": [_dep_docker],
     "cloudformation": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
     "cognitoidp": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
-    "dynamodb2": [_dep_docker],
-    "dynamodbstreams": [_dep_docker],
-    "ec2": [_dep_docker, _dep_sshpubkeys],
+    "ec2": [_dep_sshpubkeys],
     "iotdata": [_dep_jsondiff],
     "s3": [_dep_PyYAML],
-    "ses": [_dep_docker],
-    "sns": [_dep_docker],
-    "sqs": [_dep_docker],
-    "ssm": [_dep_docker, _dep_PyYAML],
+    "ses": [],
+    "sns": [],
+    "sqs": [],
+    "ssm": [_dep_PyYAML],
     "xray": [_dep_aws_xray_sdk],
 }
+# When a Table has a Stream, we'll always need to import AWSLambda to search for a corresponding function to send the table data to
+extras_per_service["dynamodb2"] = extras_per_service["awslambda"]
+extras_per_service["dynamodbstreams"] = extras_per_service["awslambda"]
+# EFS depends on EC2 to find subnets etc
+extras_per_service["efs"] = extras_per_service["ec2"]
 extras_require = {
     "all": all_extra_deps,
     "server": all_server_deps,

tests/test_acm/test_acm.py

@@ -6,17 +6,12 @@ import uuid
 import boto3
 import pytest
 import sure  # noqa
-import sys
 from botocore.exceptions import ClientError
 from freezegun import freeze_time
 from moto import mock_acm, settings
 from moto.core import ACCOUNT_ID

-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from unittest import SkipTest, mock

 RESOURCE_FOLDER = os.path.join(os.path.dirname(__file__), "resources")
 _GET_RESOURCE = lambda x: open(os.path.join(RESOURCE_FOLDER, x), "rb").read()

tests/test_core/test_utils.py

@@ -57,29 +57,3 @@ def test_camelcase_to_pascal(input, expected):
 @freeze_time("2015-01-01 12:00:00")
 def test_unix_time():
     unix_time().should.equal(1420113600.0)
-
-
-if sys.version_info[0] < 3:
-    # Tests for unicode removals (Python 2 only)
-    def _verify_no_unicode(blob):
-        """Verify that no unicode values exist"""
-        if type(blob) == dict:
-            for key, value in blob.items():
-                assert type(key) != unicode
-                _verify_no_unicode(value)
-        elif type(blob) in [list, set]:
-            for item in blob:
-                _verify_no_unicode(item)
-
-        assert blob != unicode
-
-
-    def test_py2_strip_unicode_keys():
-        bad_dict = {
-            "some": "value",
-            "a": {"nested": ["List", "of", {"unicode": "values"}]},
-            "and a": {"nested", "set", "of", 5, "values"},
-        }
-        result = py2_strip_unicode_keys(copy.deepcopy(bad_dict))
-        _verify_no_unicode(result)

tests/test_s3/test_s3.py

@@ -4341,10 +4341,6 @@ def test_s3_public_access_block_to_config_dict():
         "RestrictPublicBuckets": "False",
     }

-    # Python 2 unicode issues:
-    if sys.version_info[0] < 3:
-        public_access_block = py2_strip_unicode_keys(public_access_block)
-
     # Add a public access block:
     s3_config_query.backends["global"].put_bucket_public_access_block(
         "bucket1", public_access_block
@@ -4811,11 +4807,8 @@ def test_s3_config_dict():
         }
     )

-    # The policy is a byte array -- need to encode in Python 3 -- for Python 2 just pass the raw string in:
-    if sys.version_info[0] > 2:
-        pass_policy = bytes(policy, "utf-8")
-    else:
-        pass_policy = policy
+    # The policy is a byte array -- need to encode in Python 3
+    pass_policy = bytes(policy, "utf-8")

     s3_config_query.backends["global"].set_bucket_policy("bucket1", pass_policy)
     # Get the us-west-2 bucket and verify that it works properly:

tests/test_sqs/test_sqs.py

@@ -10,23 +10,16 @@ import hashlib
 import boto
 import boto3
 import botocore.exceptions
-import sys
 import sure  # noqa
 from boto.exception import SQSError
 from boto.sqs.message import Message, RawMessage
 from botocore.exceptions import ClientError
 from freezegun import freeze_time
-from moto import mock_sqs, mock_sqs_deprecated, mock_lambda, mock_logs, settings
-from unittest import SkipTest
-
-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from moto import mock_sqs, mock_sqs_deprecated, mock_logs, settings
+from unittest import SkipTest, mock

 import pytest
 from tests.helpers import requires_boto_gte
-from tests.test_awslambda.test_lambda import get_test_zip_file1, get_role_name
 from moto.core import ACCOUNT_ID
 from moto.sqs.models import (
     MAXIMUM_MESSAGE_SIZE_ATTR_LOWER_BOUND,
@@ -2643,64 +2636,6 @@ def test_send_messages_to_fifo_without_message_group_id():
     )

-
-@mock_logs
-@mock_lambda
-@mock_sqs
-def test_invoke_function_from_sqs_exception():
-    logs_conn = boto3.client("logs", region_name="us-east-1")
-    sqs = boto3.resource("sqs", region_name="us-east-1")
-    queue = sqs.create_queue(QueueName="test-sqs-queue1")
-
-    conn = boto3.client("lambda", region_name="us-east-1")
-    func = conn.create_function(
-        FunctionName="testFunction",
-        Runtime="python2.7",
-        Role=get_role_name(),
-        Handler="lambda_function.lambda_handler",
-        Code={"ZipFile": get_test_zip_file1()},
-        Description="test lambda function",
-        Timeout=3,
-        MemorySize=128,
-        Publish=True,
-    )
-
-    response = conn.create_event_source_mapping(
-        EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
-    )
-
-    assert response["EventSourceArn"] == queue.attributes["QueueArn"]
-    assert response["State"] == "Enabled"
-
-    entries = [
-        {
-            "Id": "1",
-            "MessageBody": json.dumps({"uuid": str(uuid.uuid4()), "test": "test"}),
-        }
-    ]
-
-    queue.send_messages(Entries=entries)
-
-    start = time.time()
-    while (time.time() - start) < 30:
-        result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
-        log_streams = result.get("logStreams")
-        if not log_streams:
-            time.sleep(1)
-            continue
-
-        assert len(log_streams) >= 1
-
-        result = logs_conn.get_log_events(
-            logGroupName="/aws/lambda/testFunction",
-            logStreamName=log_streams[0]["logStreamName"],
-        )
-        for event in result.get("events"):
-            if "custom log event" in event["message"]:
-                return
-        time.sleep(1)
-
-    assert False, "Test Failed"

 @mock_sqs
 def test_maximum_message_size_attribute_default():
     sqs = boto3.resource("sqs", region_name="eu-west-3")

New test file (the SQS-to-Lambda test above, moved out of tests/test_sqs/test_sqs.py)

@@ -0,0 +1,65 @@
+import boto3
+import json
+import time
+import uuid
+
+from moto import mock_lambda, mock_sqs, mock_logs
+from tests.test_awslambda.test_lambda import get_test_zip_file1, get_role_name
+
+
+@mock_logs
+@mock_lambda
+@mock_sqs
+def test_invoke_function_from_sqs_exception():
+    logs_conn = boto3.client("logs", region_name="us-east-1")
+    sqs = boto3.resource("sqs", region_name="us-east-1")
+    queue = sqs.create_queue(QueueName="test-sqs-queue1")
+
+    conn = boto3.client("lambda", region_name="us-east-1")
+    func = conn.create_function(
+        FunctionName="testFunction",
+        Runtime="python2.7",
+        Role=get_role_name(),
+        Handler="lambda_function.lambda_handler",
+        Code={"ZipFile": get_test_zip_file1()},
+        Description="test lambda function",
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    response = conn.create_event_source_mapping(
+        EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
+    )
+
+    assert response["EventSourceArn"] == queue.attributes["QueueArn"]
+    assert response["State"] == "Enabled"
+
+    entries = [
+        {
+            "Id": "1",
+            "MessageBody": json.dumps({"uuid": str(uuid.uuid4()), "test": "test"}),
+        }
+    ]
+
+    queue.send_messages(Entries=entries)
+
+    start = time.time()
+    while (time.time() - start) < 30:
+        result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
+        log_streams = result.get("logStreams")
+        if not log_streams:
+            time.sleep(1)
+            continue
+
+        assert len(log_streams) >= 1
+
+        result = logs_conn.get_log_events(
+            logGroupName="/aws/lambda/testFunction",
+            logStreamName=log_streams[0]["logStreamName"],
+        )
+        for event in result.get("events"):
+            if "custom log event" in event["message"]:
+                return
+        time.sleep(1)
+
+    assert False, "Test Failed"
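
The 30-second loop at the end of the test is a generic poll-for-log-output pattern; factored into a reusable helper it looks roughly like this (helper name and parameters are hypothetical, the timeout and interval mirror the test's loop):

import time

def wait_for_log_event(logs_conn, group, needle, timeout=30, interval=1):
    """Poll CloudWatch Logs until `needle` appears in an event, or give up."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        streams = logs_conn.describe_log_streams(logGroupName=group).get("logStreams")
        for stream in streams or []:
            events = logs_conn.get_log_events(
                logGroupName=group, logStreamName=stream["logStreamName"]
            )["events"]
            if any(needle in event["message"] for event in events):
                return True
        time.sleep(interval)
    return False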

tests/test_stepfunctions/test_stepfunctions.py

@@ -4,20 +4,15 @@ import boto3
 import json
 import os
 import sure  # noqa
-import sys
 from datetime import datetime
 from dateutil.tz import tzutc
 from botocore.exceptions import ClientError
 import pytest

-from moto import mock_cloudformation, mock_sts, mock_stepfunctions
+from moto import mock_sts, mock_stepfunctions
 from moto.core import ACCOUNT_ID

-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from unittest import SkipTest, mock

 region = "us-east-1"
 simple_definition = (