Remove unnecessary dependencies EC2/SQS (#4094)
* Add missing dependencies for EFS
parent 6b4032a9a8
commit 11883a1fda
.github/workflows/dependency_test.yml (vendored, new file)
@@ -0,0 +1,18 @@
+name: DependencyTest
+
+on: workflow_dispatch
+
+jobs:
+  runtest:
+    name: Run Dependency Test
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ 3.8 ]
+
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v2
+      - name: Run test
+        run: |
+          scripts/dependency_test.sh
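For context, `on: workflow_dispatch` means this workflow never runs automatically on push or pull request; it has to be triggered manually from the Actions tab (or through the GitHub API), presumably because installing each service's dependencies in isolation is too slow to run on every commit.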
moto/sns/models.py
@@ -16,7 +16,6 @@ from moto.core.utils import (
     camelcase_to_underscores,
 )
 from moto.sqs import sqs_backends
-from moto.awslambda import lambda_backends
 
 from .exceptions import (
     SNSNotFoundError,
@@ -211,6 +210,8 @@ class Subscription(BaseModel):
            else:
                assert False
 
+            from moto.awslambda import lambda_backends
+
            lambda_backends[region].send_sns_message(
                function_name, message, subject=subject, qualifier=qualifier
            )
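The two hunks above, and the Queue hunks below, apply the same pattern: a module-level import of moto.awslambda is deleted and re-created inside the method that needs it. A module-level import makes awslambda (and its docker dependency) a hard requirement for anyone who imports the SNS or SQS backend; a function-local import is only resolved when that code path actually runs. A minimal, self-contained sketch of the idea (names are illustrative, not moto's API):

    class Publisher:
        def publish(self, message):
            # A module-level import of an optional backend would force every
            # importer of this module to have it installed; deferring the
            # import to the call site means it only matters on this path.
            from json import dumps  # stand-in for `from moto.awslambda import lambda_backends`

            return dumps({"message": message})

    print(Publisher().publish("hello"))  # {"message": "hello"}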
moto/sqs/models.py
@@ -498,7 +498,6 @@ class Queue(CloudFormationModel):
                return
 
        self._messages.append(message)
-        from moto.awslambda import lambda_backends
 
        for arn, esm in self.lambda_event_source_mappings.items():
            backend = sqs_backends[self.region]
@@ -516,6 +515,8 @@ class Queue(CloudFormationModel):
                self.visibility_timeout,
            )
 
+            from moto.awslambda import lambda_backends
+
            result = lambda_backends[self.region].send_sqs_batch(
                arn, messages, self.queue_arn
            )
scripts/dependency_test.sh
@@ -33,7 +33,7 @@ valid_service() {
   # Verify whether this is a valid service
   # We'll ignore metadata folders, and folders that test generic Moto behaviour
   # We'll also ignore CloudFormation, as it will always depend on other services
-  local ignore_moto_folders="core instance_metadata __pycache__ templates cloudformation"
+  local ignore_moto_folders="core instance_metadata __pycache__ templates cloudformation utilities"
   if echo $ignore_moto_folders | grep -q "$1"; then
     return 1
   else
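A note on the one-word change above: valid_service() is how the dependency test decides which moto/ subfolders are real services, and adding `utilities` to the ignore list keeps moto/utilities (shared helper code, not a mockable AWS service) out of the per-service test run.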
setup.py
@@ -76,17 +76,20 @@ extras_per_service = {
     "batch": [_dep_docker],
     "cloudformation": [_dep_docker, _dep_PyYAML, _dep_cfn_lint],
     "cognitoidp": [_dep_python_jose, _dep_python_jose_ecdsa_pin],
-    "dynamodb2": [_dep_docker],
-    "dynamodbstreams": [_dep_docker],
-    "ec2": [_dep_docker, _dep_sshpubkeys],
+    "ec2": [_dep_sshpubkeys],
     "iotdata": [_dep_jsondiff],
     "s3": [_dep_PyYAML],
-    "ses": [_dep_docker],
-    "sns": [_dep_docker],
-    "sqs": [_dep_docker],
-    "ssm": [_dep_docker, _dep_PyYAML],
+    "ses": [],
+    "sns": [],
+    "sqs": [],
+    "ssm": [_dep_PyYAML],
     "xray": [_dep_aws_xray_sdk],
 }
+# When a Table has a Stream, we'll always need to import AWSLambda to search for a corresponding function to send the table data to
+extras_per_service["dynamodb2"] = extras_per_service["awslambda"]
+extras_per_service["dynamodbstreams"] = extras_per_service["awslambda"]
+# EFS depends on EC2 to find subnets etc
+extras_per_service["efs"] = extras_per_service["ec2"]
 extras_require = {
     "all": all_extra_deps,
     "server": all_server_deps,
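Two things are worth noting about the block above. First, the new assignments alias the same list objects, so the efs extras automatically track the ec2 extras (and dynamodb2/dynamodbstreams track awslambda) if those lists change later. A minimal sketch of that behaviour, with an illustrative stand-in pin:

    extras_per_service = {"ec2": ["sshpubkeys"]}            # illustrative value
    extras_per_service["efs"] = extras_per_service["ec2"]   # same list object
    extras_per_service["ec2"].append("another-dep")
    assert extras_per_service["efs"] == ["sshpubkeys", "another-dep"]

Second, in terms of user-visible effect: `pip install moto[efs]` now pulls in the EC2 extras, while `pip install moto[sqs]` (and ses/sns) no longer drags in docker.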
tests/test_acm/test_acm.py
@@ -6,17 +6,12 @@ import uuid
 import boto3
 import pytest
 import sure  # noqa
-import sys
 from botocore.exceptions import ClientError
 from freezegun import freeze_time
 from moto import mock_acm, settings
 from moto.core import ACCOUNT_ID
 
-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from unittest import SkipTest, mock
 
 RESOURCE_FOLDER = os.path.join(os.path.dirname(__file__), "resources")
 _GET_RESOURCE = lambda x: open(os.path.join(RESOURCE_FOLDER, x), "rb").read()
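The import change above repeats in the sqs and stepfunctions test files below: the Python 2 branch imported the third-party `mock` backport from PyPI, while on Python 3 `unittest.mock` has shipped with the standard library since 3.3, so dropping the branch removes one more unnecessary test dependency. A minimal sketch of the drop-in equivalence:

    from unittest import mock  # stdlib; replaces `import mock` from PyPI

    m = mock.MagicMock(return_value=42)
    assert m() == 42
    m.assert_called_once()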
tests/test_core/test_utils.py
@@ -57,29 +57,3 @@ def test_camelcase_to_pascal(input, expected):
 @freeze_time("2015-01-01 12:00:00")
 def test_unix_time():
     unix_time().should.equal(1420113600.0)
-
-
-if sys.version_info[0] < 3:
-    # Tests for unicode removals (Python 2 only)
-    def _verify_no_unicode(blob):
-        """Verify that no unicode values exist"""
-        if type(blob) == dict:
-            for key, value in blob.items():
-                assert type(key) != unicode
-                _verify_no_unicode(value)
-
-        elif type(blob) in [list, set]:
-            for item in blob:
-                _verify_no_unicode(item)
-
-        assert blob != unicode
-
-    def test_py2_strip_unicode_keys():
-        bad_dict = {
-            "some": "value",
-            "a": {"nested": ["List", "of", {"unicode": "values"}]},
-            "and a": {"nested", "set", "of", 5, "values"},
-        }
-
-        result = py2_strip_unicode_keys(copy.deepcopy(bad_dict))
-        _verify_no_unicode(result)
tests/test_s3/test_s3.py
@@ -4341,10 +4341,6 @@ def test_s3_public_access_block_to_config_dict():
         "RestrictPublicBuckets": "False",
     }
 
-    # Python 2 unicode issues:
-    if sys.version_info[0] < 3:
-        public_access_block = py2_strip_unicode_keys(public_access_block)
-
     # Add a public access block:
     s3_config_query.backends["global"].put_bucket_public_access_block(
         "bucket1", public_access_block
@@ -4811,11 +4807,8 @@ def test_s3_config_dict():
         }
     )
 
-    # The policy is a byte array -- need to encode in Python 3 -- for Python 2 just pass the raw string in:
-    if sys.version_info[0] > 2:
-        pass_policy = bytes(policy, "utf-8")
-    else:
-        pass_policy = policy
+    # The policy is a byte array -- need to encode in Python 3
+    pass_policy = bytes(policy, "utf-8")
     s3_config_query.backends["global"].set_bucket_policy("bucket1", pass_policy)
 
     # Get the us-west-2 bucket and verify that it works properly:
tests/test_sqs/test_sqs.py
@@ -10,23 +10,16 @@ import hashlib
 import boto
 import boto3
 import botocore.exceptions
-import sys
 import sure  # noqa
 from boto.exception import SQSError
 from boto.sqs.message import Message, RawMessage
 from botocore.exceptions import ClientError
 from freezegun import freeze_time
-from moto import mock_sqs, mock_sqs_deprecated, mock_lambda, mock_logs, settings
-from unittest import SkipTest
+from moto import mock_sqs, mock_sqs_deprecated, mock_logs, settings
 
-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from unittest import SkipTest, mock
 import pytest
 from tests.helpers import requires_boto_gte
-from tests.test_awslambda.test_lambda import get_test_zip_file1, get_role_name
 from moto.core import ACCOUNT_ID
 from moto.sqs.models import (
     MAXIMUM_MESSAGE_SIZE_ATTR_LOWER_BOUND,
@@ -2643,64 +2636,6 @@ def test_send_messages_to_fifo_without_message_group_id():
     )
 
 
-@mock_logs
-@mock_lambda
-@mock_sqs
-def test_invoke_function_from_sqs_exception():
-    logs_conn = boto3.client("logs", region_name="us-east-1")
-    sqs = boto3.resource("sqs", region_name="us-east-1")
-    queue = sqs.create_queue(QueueName="test-sqs-queue1")
-
-    conn = boto3.client("lambda", region_name="us-east-1")
-    func = conn.create_function(
-        FunctionName="testFunction",
-        Runtime="python2.7",
-        Role=get_role_name(),
-        Handler="lambda_function.lambda_handler",
-        Code={"ZipFile": get_test_zip_file1()},
-        Description="test lambda function",
-        Timeout=3,
-        MemorySize=128,
-        Publish=True,
-    )
-
-    response = conn.create_event_source_mapping(
-        EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
-    )
-
-    assert response["EventSourceArn"] == queue.attributes["QueueArn"]
-    assert response["State"] == "Enabled"
-
-    entries = [
-        {
-            "Id": "1",
-            "MessageBody": json.dumps({"uuid": str(uuid.uuid4()), "test": "test"}),
-        }
-    ]
-
-    queue.send_messages(Entries=entries)
-
-    start = time.time()
-    while (time.time() - start) < 30:
-        result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
-        log_streams = result.get("logStreams")
-        if not log_streams:
-            time.sleep(1)
-            continue
-        assert len(log_streams) >= 1
-
-        result = logs_conn.get_log_events(
-            logGroupName="/aws/lambda/testFunction",
-            logStreamName=log_streams[0]["logStreamName"],
-        )
-        for event in result.get("events"):
-            if "custom log event" in event["message"]:
-                return
-        time.sleep(1)
-
-    assert False, "Test Failed"
-
-
 @mock_sqs
 def test_maximum_message_size_attribute_default():
     sqs = boto3.resource("sqs", region_name="eu-west-3")
tests/test_sqs/test_sqs_integration.py (new file)
@@ -0,0 +1,65 @@
+import boto3
+import json
+import time
+import uuid
+
+from moto import mock_lambda, mock_sqs, mock_logs
+from tests.test_awslambda.test_lambda import get_test_zip_file1, get_role_name
+
+
+@mock_logs
+@mock_lambda
+@mock_sqs
+def test_invoke_function_from_sqs_exception():
+    logs_conn = boto3.client("logs", region_name="us-east-1")
+    sqs = boto3.resource("sqs", region_name="us-east-1")
+    queue = sqs.create_queue(QueueName="test-sqs-queue1")
+
+    conn = boto3.client("lambda", region_name="us-east-1")
+    func = conn.create_function(
+        FunctionName="testFunction",
+        Runtime="python2.7",
+        Role=get_role_name(),
+        Handler="lambda_function.lambda_handler",
+        Code={"ZipFile": get_test_zip_file1()},
+        Description="test lambda function",
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    response = conn.create_event_source_mapping(
+        EventSourceArn=queue.attributes["QueueArn"], FunctionName=func["FunctionArn"]
+    )
+
+    assert response["EventSourceArn"] == queue.attributes["QueueArn"]
+    assert response["State"] == "Enabled"
+
+    entries = [
+        {
+            "Id": "1",
+            "MessageBody": json.dumps({"uuid": str(uuid.uuid4()), "test": "test"}),
+        }
+    ]
+
+    queue.send_messages(Entries=entries)
+
+    start = time.time()
+    while (time.time() - start) < 30:
+        result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
+        log_streams = result.get("logStreams")
+        if not log_streams:
+            time.sleep(1)
+            continue
+        assert len(log_streams) >= 1
+
+        result = logs_conn.get_log_events(
+            logGroupName="/aws/lambda/testFunction",
+            logStreamName=log_streams[0]["logStreamName"],
+        )
+        for event in result.get("events"):
+            if "custom log event" in event["message"]:
+                return
+        time.sleep(1)
+
+    assert False, "Test Failed"
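Design note: the test above is moved verbatim out of tests/test_sqs/test_sqs.py because it exercises cross-service behaviour (SQS triggering Lambda, asserted via CloudWatch Logs). Keeping it in a separate module lets the DependencyTest workflow run the plain SQS tests with only the sqs extras installed, which presumably is also why the get_test_zip_file1/get_role_name import was dropped from test_sqs.py above.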
tests/test_stepfunctions/test_stepfunctions.py
@@ -4,20 +4,15 @@ import boto3
 import json
 import os
 import sure  # noqa
-import sys
 from datetime import datetime
 from dateutil.tz import tzutc
 from botocore.exceptions import ClientError
 import pytest
 
-from moto import mock_cloudformation, mock_sts, mock_stepfunctions
+from moto import mock_sts, mock_stepfunctions
 from moto.core import ACCOUNT_ID
 
-if sys.version_info[0] < 3:
-    import mock
-    from unittest import SkipTest
-else:
-    from unittest import SkipTest, mock
+from unittest import SkipTest, mock
 
 region = "us-east-1"
 simple_definition = (