Merge pull request #1 from spulec/master

Sync upstream
Bert Blommers 2019-08-23 09:34:15 +01:00 committed by GitHub
commit d4aa55760c
16 changed files with 806 additions and 55 deletions

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals
import base64
import time
from collections import defaultdict
import copy
import datetime
@ -31,6 +32,7 @@ from moto.logs.models import logs_backends
from moto.s3.exceptions import MissingBucket, MissingKey
from moto import settings
from .utils import make_function_arn, make_function_ver_arn
from moto.sqs import sqs_backends
logger = logging.getLogger(__name__)
@ -429,24 +431,59 @@ class LambdaFunction(BaseModel):
class EventSourceMapping(BaseModel):
def __init__(self, spec):
# required
self.function_name = spec['FunctionName']
self.function_arn = spec['FunctionArn']
self.event_source_arn = spec['EventSourceArn']
self.starting_position = spec['StartingPosition']
self.uuid = str(uuid.uuid4())
self.last_modified = time.mktime(datetime.datetime.utcnow().timetuple())
# BatchSize service default/max mapping
batch_size_map = {
'kinesis': (100, 10000),
'dynamodb': (100, 1000),
'sqs': (10, 10),
}
source_type = self.event_source_arn.split(":")[2].lower()
batch_size_entry = batch_size_map.get(source_type)
if batch_size_entry:
# Use service default if not provided
batch_size = int(spec.get('BatchSize', batch_size_entry[0]))
if batch_size > batch_size_entry[1]:
raise ValueError("InvalidParameterValueException",
"BatchSize {} exceeds the max of {}".format(batch_size, batch_size_entry[1]))
else:
self.batch_size = batch_size
else:
raise ValueError("InvalidParameterValueException",
"Unsupported event source type")
# optional
self.batch_size = spec.get('BatchSize', 100)
self.starting_position = spec.get('StartingPosition', 'TRIM_HORIZON')
self.enabled = spec.get('Enabled', True)
self.starting_position_timestamp = spec.get('StartingPositionTimestamp',
None)
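
For readers skimming the diff, the rule above boils down to: the event source type is taken from the third ARN segment, the per-service default applies when BatchSize is omitted, and anything above the per-service maximum is rejected. The sketch below restates that rule in isolation; it is illustrative only and not part of the change.

    # Illustrative restatement of the BatchSize rule above (not moto's code)
    BATCH_SIZE_MAP = {'kinesis': (100, 10000), 'dynamodb': (100, 1000), 'sqs': (10, 10)}

    def resolve_batch_size(event_source_arn, requested=None):
        source_type = event_source_arn.split(':')[2].lower()
        if source_type not in BATCH_SIZE_MAP:
            raise ValueError('Unsupported event source type')
        default, maximum = BATCH_SIZE_MAP[source_type]
        batch_size = int(requested) if requested is not None else default
        if batch_size > maximum:
            raise ValueError('BatchSize {} exceeds the max of {}'.format(batch_size, maximum))
        return batch_size

    # resolve_batch_size('arn:aws:sqs:us-east-1:123456789012:queue1')            -> 10
    # resolve_batch_size('arn:aws:kinesis:us-east-1:123456789012:stream/s1', 200) -> 200
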
def get_configuration(self):
return {
'UUID': self.uuid,
'BatchSize': self.batch_size,
'EventSourceArn': self.event_source_arn,
'FunctionArn': self.function_arn,
'LastModified': self.last_modified,
'LastProcessingResult': '',
'State': 'Enabled' if self.enabled else 'Disabled',
'StateTransitionReason': 'User initiated'
}
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json,
region_name):
properties = cloudformation_json['Properties']
func = lambda_backends[region_name].get_function(properties['FunctionName'])
spec = {
'FunctionName': properties['FunctionName'],
'FunctionArn': func.function_arn,
'EventSourceArn': properties['EventSourceArn'],
'StartingPosition': properties['StartingPosition']
'StartingPosition': properties['StartingPosition'],
'BatchSize': properties.get('BatchSize', 100)
}
optional_properties = 'BatchSize Enabled StartingPositionTimestamp'.split()
for prop in optional_properties:
@ -466,8 +503,10 @@ class LambdaVersion(BaseModel):
def create_from_cloudformation_json(cls, resource_name, cloudformation_json,
region_name):
properties = cloudformation_json['Properties']
function_name = properties['FunctionName']
func = lambda_backends[region_name].publish_function(function_name)
spec = {
'Version': properties.get('Version')
'Version': func.version
}
return LambdaVersion(spec)
@ -515,6 +554,9 @@ class LambdaStorage(object):
def get_arn(self, arn):
return self._arns.get(arn, None)
def get_function_by_name_or_arn(self, input):
return self.get_function(input) or self.get_arn(input)
def put_function(self, fn):
"""
:param fn: Function
@ -596,6 +638,7 @@ class LambdaStorage(object):
class LambdaBackend(BaseBackend):
def __init__(self, region_name):
self._lambdas = LambdaStorage()
self._event_source_mappings = {}
self.region_name = region_name
def reset(self):
@ -617,6 +660,40 @@ class LambdaBackend(BaseBackend):
fn.version = ver.version
return fn
def create_event_source_mapping(self, spec):
required = [
'EventSourceArn',
'FunctionName',
]
for param in required:
if not spec.get(param):
raise RESTError('InvalidParameterValueException', 'Missing {}'.format(param))
# Validate function name
func = self._lambdas.get_function_by_name_or_arn(spec.pop('FunctionName', ''))
if not func:
raise RESTError('ResourceNotFoundException', 'Invalid FunctionName')
# Validate queue
for queue in sqs_backends[self.region_name].queues.values():
if queue.queue_arn == spec['EventSourceArn']:
if queue.lambda_event_source_mappings.get(func.function_arn):
# TODO: Correct exception?
raise RESTError('ResourceConflictException', 'The resource already exists.')
if queue.fifo_queue:
raise RESTError('InvalidParameterValueException',
'{} is FIFO'.format(queue.queue_arn))
else:
spec.update({'FunctionArn': func.function_arn})
esm = EventSourceMapping(spec)
self._event_source_mappings[esm.uuid] = esm
# Set backend function on queue
queue.lambda_event_source_mappings[esm.function_arn] = esm
return esm
raise RESTError('ResourceNotFoundException', 'Invalid EventSourceArn')
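
To show the backend method above at the SDK level, here is a hedged sketch mirroring the tests further down in this diff; the zip helper, the queue/function names and the region are illustrative assumptions. An SQS-backed mapping comes back Enabled with the service default BatchSize of 10.

    import io
    import zipfile

    import boto3
    from moto import mock_lambda, mock_sqs

    def _zip(source):
        # Minimal in-memory zip so create_function accepts the Code parameter
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, 'w') as zf:
            zf.writestr('lambda_function.py', source)
        return buf.getvalue()

    @mock_lambda
    @mock_sqs
    def create_sqs_mapping_demo():
        sqs = boto3.resource('sqs', region_name='us-west-2')
        queue = sqs.create_queue(QueueName='demo-queue')
        conn = boto3.client('lambda', region_name='us-west-2')
        func = conn.create_function(
            FunctionName='demoFunction', Runtime='python2.7', Role='test-iam-role',
            Handler='lambda_function.lambda_handler',
            Code={'ZipFile': _zip('def lambda_handler(event, context):\n    return event\n')},
            Timeout=3, MemorySize=128, Publish=True,
        )
        esm = conn.create_event_source_mapping(
            EventSourceArn=queue.attributes['QueueArn'],
            FunctionName=func['FunctionArn'],
        )
        assert esm['State'] == 'Enabled'
        assert esm['BatchSize'] == 10   # SQS default from batch_size_map above
        return esm
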
def publish_function(self, function_name):
return self._lambdas.publish_function(function_name)
@ -626,6 +703,33 @@ class LambdaBackend(BaseBackend):
def list_versions_by_function(self, function_name):
return self._lambdas.list_versions_by_function(function_name)
def get_event_source_mapping(self, uuid):
return self._event_source_mappings.get(uuid)
def delete_event_source_mapping(self, uuid):
return self._event_source_mappings.pop(uuid, None)
def update_event_source_mapping(self, uuid, spec):
esm = self.get_event_source_mapping(uuid)
if esm:
if spec.get('FunctionName'):
func = self._lambdas.get_function_by_name_or_arn(spec.get('FunctionName'))
esm.function_arn = func.function_arn
if 'BatchSize' in spec:
esm.batch_size = spec['BatchSize']
if 'Enabled' in spec:
esm.enabled = spec['Enabled']
return esm
return False
def list_event_source_mappings(self, event_source_arn, function_name):
esms = list(self._event_source_mappings.values())
if event_source_arn:
esms = list(filter(lambda x: x.event_source_arn == event_source_arn, esms))
if function_name:
esms = list(filter(lambda x: x.function_name == function_name, esms))
return esms
def get_function_by_arn(self, function_arn):
return self._lambdas.get_arn(function_arn)
@ -635,7 +739,43 @@ class LambdaBackend(BaseBackend):
def list_functions(self):
return self._lambdas.all()
def send_message(self, function_name, message, subject=None, qualifier=None):
def send_sqs_batch(self, function_arn, messages, queue_arn):
success = True
for message in messages:
func = self.get_function_by_arn(function_arn)
result = self._send_sqs_message(func, message, queue_arn)
if not result:
success = False
return success
def _send_sqs_message(self, func, message, queue_arn):
event = {
"Records": [
{
"messageId": message.id,
"receiptHandle": message.receipt_handle,
"body": message.body,
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1545082649183",
"SenderId": "AIDAIENQZJOLO23YVJ4VO",
"ApproximateFirstReceiveTimestamp": "1545082649185"
},
"messageAttributes": {},
"md5OfBody": "098f6bcd4621d373cade4e832627b4f6",
"eventSource": "aws:sqs",
"eventSourceARN": queue_arn,
"awsRegion": self.region_name
}
]
}
request_headers = {}
response_headers = {}
func.invoke(json.dumps(event), request_headers, response_headers)
return 'x-amz-function-error' not in response_headers
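
Since the Records payload built above mimics the shape of a real SQS-triggered invocation, a function wired to such a mapping would typically look like the sketch below. This is illustrative only; the body handling is an assumption, not part of the diff.

    def lambda_handler(event, context):
        for record in event['Records']:
            # Each record mirrors the dict built in _send_sqs_message above
            print('message %s from %s: %s' % (
                record['messageId'], record['eventSourceARN'], record['body']))
        return {'processed': len(event['Records'])}
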
def send_sns_message(self, function_name, message, subject=None, qualifier=None):
event = {
"Records": [
{

View File

@ -39,6 +39,31 @@ class LambdaResponse(BaseResponse):
else:
raise ValueError("Cannot handle request")
def event_source_mappings(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
querystring = self.querystring
event_source_arn = querystring.get('EventSourceArn', [None])[0]
function_name = querystring.get('FunctionName', [None])[0]
return self._list_event_source_mappings(event_source_arn, function_name)
elif request.method == 'POST':
return self._create_event_source_mapping(request, full_url, headers)
else:
raise ValueError("Cannot handle request")
def event_source_mapping(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
path = request.path if hasattr(request, 'path') else path_url(request.url)
uuid = path.split('/')[-1]
if request.method == 'GET':
return self._get_event_source_mapping(uuid)
elif request.method == 'PUT':
return self._update_event_source_mapping(uuid)
elif request.method == 'DELETE':
return self._delete_event_source_mapping(uuid)
else:
raise ValueError("Cannot handle request")
def function(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
@ -177,6 +202,45 @@ class LambdaResponse(BaseResponse):
config = fn.get_configuration()
return 201, {}, json.dumps(config)
def _create_event_source_mapping(self, request, full_url, headers):
try:
fn = self.lambda_backend.create_event_source_mapping(self.json_body)
except ValueError as e:
return 400, {}, json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}})
else:
config = fn.get_configuration()
return 201, {}, json.dumps(config)
def _list_event_source_mappings(self, event_source_arn, function_name):
esms = self.lambda_backend.list_event_source_mappings(event_source_arn, function_name)
result = {
'EventSourceMappings': [esm.get_configuration() for esm in esms]
}
return 200, {}, json.dumps(result)
def _get_event_source_mapping(self, uuid):
result = self.lambda_backend.get_event_source_mapping(uuid)
if result:
return 200, {}, json.dumps(result.get_configuration())
else:
return 404, {}, "{}"
def _update_event_source_mapping(self, uuid):
result = self.lambda_backend.update_event_source_mapping(uuid, self.json_body)
if result:
return 202, {}, json.dumps(result.get_configuration())
else:
return 404, {}, "{}"
def _delete_event_source_mapping(self, uuid):
esm = self.lambda_backend.delete_event_source_mapping(uuid)
if esm:
json_result = esm.get_configuration()
json_result.update({'State': 'Deleting'})
return 202, {}, json.dumps(json_result)
else:
return 404, {}, "{}"
def _publish_function(self, request, full_url, headers):
function_name = self.path.rsplit('/', 2)[-2]

View File

@ -11,6 +11,8 @@ url_paths = {
'{0}/(?P<api_version>[^/]+)/functions/?$': response.root,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/?$': response.function,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/versions/?$': response.versions,
r'{0}/(?P<api_version>[^/]+)/event-source-mappings/?$': response.event_source_mappings,
r'{0}/(?P<api_version>[^/]+)/event-source-mappings/(?P<UUID>[\w_-]+)/?$': response.event_source_mapping,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invocations/?$': response.invoke,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invoke-async/?$': response.invoke_async,
r'{0}/(?P<api_version>[^/]+)/tags/(?P<resource_arn>.+)': response.tag,
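
The two event-source-mappings routes above make the full mapping lifecycle reachable through the SDK. A hedged sketch of which client call lands on which route follows; queue_arn and function_arn are assumed to come from an already mocked queue and function (see the tests later in this diff).

    import boto3

    def exercise_event_source_mapping_routes(queue_arn, function_arn):
        client = boto3.client('lambda', region_name='us-west-2')
        esm = client.create_event_source_mapping(                          # POST   event-source-mappings/
            EventSourceArn=queue_arn, FunctionName=function_arn)
        client.list_event_source_mappings(EventSourceArn=queue_arn)        # GET    event-source-mappings/
        client.get_event_source_mapping(UUID=esm['UUID'])                  # GET    event-source-mappings/<UUID>
        client.update_event_source_mapping(UUID=esm['UUID'], BatchSize=5)  # PUT    event-source-mappings/<UUID>
        client.delete_event_source_mapping(UUID=esm['UUID'])               # DELETE event-source-mappings/<UUID>
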

View File

@ -106,7 +106,7 @@ class AssumedRoleAccessKey(object):
self._access_key_id = access_key_id
self._secret_access_key = assumed_role.secret_access_key
self._session_token = assumed_role.session_token
self._owner_role_name = assumed_role.arn.split("/")[-1]
self._owner_role_name = assumed_role.role_arn.split("/")[-1]
self._session_name = assumed_role.session_name
if headers["X-Amz-Security-Token"] != self._session_token:
raise CreateAccessKeyFailure(reason="InvalidToken")
@ -172,6 +172,8 @@ class IAMRequestBase(object):
self._raise_signature_does_not_match()
def check_action_permitted(self):
if self._action == 'sts:GetCallerIdentity': # always allowed, even if there's an explicit Deny for it
return True
policies = self._access_key.collect_policies()
permitted = False

View File

@ -318,6 +318,9 @@ class DynamoHandler(BaseResponse):
for table_name, table_request in table_batches.items():
keys = table_request['Keys']
if self._contains_duplicates(keys):
er = 'com.amazon.coral.validate#ValidationException'
return self.error(er, 'Provided list of item keys contains duplicates')
attributes_to_get = table_request.get('AttributesToGet')
results["Responses"][table_name] = []
for key in keys:
@ -333,6 +336,15 @@ class DynamoHandler(BaseResponse):
})
return dynamo_json_dump(results)
def _contains_duplicates(self, keys):
unique_keys = []
for k in keys:
if k in unique_keys:
return True
else:
unique_keys.append(k)
return False
def query(self):
name = self.body['TableName']
# {u'KeyConditionExpression': u'#n0 = :v0', u'ExpressionAttributeValues': {u':v0': {u'S': u'johndoe'}}, u'ExpressionAttributeNames': {u'#n0': u'username'}}

View File

@ -463,10 +463,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
else:
result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys)
key_count = len(result_keys) + len(result_folders)
return template.render(
bucket=bucket,
prefix=prefix or '',
delimiter=delimiter,
key_count=key_count,
result_keys=result_keys,
result_folders=result_folders,
fetch_owner=fetch_owner,
@ -1330,7 +1333,7 @@ S3_BUCKET_GET_RESPONSE_V2 = """<?xml version="1.0" encoding="UTF-8"?>
<Name>{{ bucket.name }}</Name>
<Prefix>{{ prefix }}</Prefix>
<MaxKeys>{{ max_keys }}</MaxKeys>
<KeyCount>{{ result_keys | length }}</KeyCount>
<KeyCount>{{ key_count }}</KeyCount>
{% if delimiter %}
<Delimiter>{{ delimiter }}</Delimiter>
{% endif %}

View File

@ -119,7 +119,7 @@ class Subscription(BaseModel):
else:
assert False
lambda_backends[region].send_message(function_name, message, subject=subject, qualifier=qualifier)
lambda_backends[region].send_sns_message(function_name, message, subject=subject, qualifier=qualifier)
def _matches_filter_policy(self, message_attributes):
# TODO: support Anything-but matching, prefix matching and

View File

@ -189,6 +189,8 @@ class Queue(BaseModel):
self.name)
self.dead_letter_queue = None
self.lambda_event_source_mappings = {}
# default settings for a non fifo queue
defaults = {
'ContentBasedDeduplication': 'false',
@ -360,6 +362,33 @@ class Queue(BaseModel):
def add_message(self, message):
self._messages.append(message)
from moto.awslambda import lambda_backends
for arn, esm in self.lambda_event_source_mappings.items():
backend = sqs_backends[self.region]
"""
Lambda polls the queue and invokes your function synchronously with an event
that contains queue messages. Lambda reads messages in batches and invokes
your function once for each batch. When your function successfully processes
a batch, Lambda deletes its messages from the queue.
"""
messages = backend.receive_messages(
self.name,
esm.batch_size,
self.receive_message_wait_time_seconds,
self.visibility_timeout,
)
result = lambda_backends[self.region].send_sqs_batch(
arn,
messages,
self.queue_arn,
)
if result:
[backend.delete_message(self.name, m.receipt_handle) for m in messages]
else:
[backend.change_message_visibility(self.name, m.receipt_handle, 0) for m in messages]
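
The docstring and branch above define the observable contract: a successful invocation deletes the batch, a failed one resets visibility so the messages reappear. A hedged sketch of how a caller of the mock sees this (the queue name is a placeholder and is assumed to already have a mapping attached; the tests in test_lambda.py below exercise the same flow):

    import boto3

    def send_and_check(queue_name='mapped-queue'):
        sqs = boto3.resource('sqs', region_name='us-west-2')
        queue = sqs.get_queue_by_name(QueueName=queue_name)
        queue.send_message(MessageBody='hello')   # triggers the mapped function via add_message above
        # Success path: the batch was deleted. Failure path: visibility was reset
        # to 0, so the same messages come straight back here.
        return queue.receive_messages(MaxNumberOfMessages=10)
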
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

View File

@ -2,7 +2,8 @@ from __future__ import unicode_literals
import datetime
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds
from moto.sts.utils import random_access_key_id, random_secret_access_key, random_session_token
from moto.iam.models import ACCOUNT_ID
from moto.sts.utils import random_access_key_id, random_secret_access_key, random_session_token, random_assumed_role_id
class Token(BaseModel):
@ -22,7 +23,7 @@ class AssumedRole(BaseModel):
def __init__(self, role_session_name, role_arn, policy, duration, external_id):
self.session_name = role_session_name
self.arn = role_arn
self.role_arn = role_arn
self.policy = policy
now = datetime.datetime.utcnow()
self.expiration = now + datetime.timedelta(seconds=duration)
@ -30,11 +31,24 @@ class AssumedRole(BaseModel):
self.access_key_id = "ASIA" + random_access_key_id()
self.secret_access_key = random_secret_access_key()
self.session_token = random_session_token()
self.assumed_role_id = "AROA" + random_assumed_role_id()
@property
def expiration_ISO8601(self):
return iso_8601_datetime_with_milliseconds(self.expiration)
@property
def user_id(self):
return self.assumed_role_id + ":" + self.session_name
@property
def arn(self):
return "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=ACCOUNT_ID,
role_name=self.role_arn.split("/")[-1],
session_name=self.session_name
)
class STSBackend(BaseBackend):
@ -54,6 +68,12 @@ class STSBackend(BaseBackend):
self.assumed_roles.append(role)
return role
def get_assumed_role_from_access_key(self, access_key_id):
for assumed_role in self.assumed_roles:
if assumed_role.access_key_id == access_key_id:
return assumed_role
return None
def assume_role_with_web_identity(self, **kwargs):
return self.assume_role(**kwargs)

View File

@ -1,6 +1,8 @@
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from moto.iam.models import ACCOUNT_ID
from moto.iam import iam_backend
from .exceptions import STSValidationError
from .models import sts_backend
@ -31,7 +33,7 @@ class TokenResponse(BaseResponse):
token = sts_backend.get_federation_token(
duration=duration, name=name, policy=policy)
template = self.response_template(GET_FEDERATION_TOKEN_RESPONSE)
return template.render(token=token)
return template.render(token=token, account_id=ACCOUNT_ID)
def assume_role(self):
role_session_name = self.querystring.get('RoleSessionName')[0]
@ -71,7 +73,23 @@ class TokenResponse(BaseResponse):
def get_caller_identity(self):
template = self.response_template(GET_CALLER_IDENTITY_RESPONSE)
return template.render()
# Default values in case the request does not use valid credentials generated by moto
user_id = "AKIAIOSFODNN7EXAMPLE"
arn = "arn:aws:sts::{account_id}:user/moto".format(account_id=ACCOUNT_ID)
access_key_id = self.get_current_user()
assumed_role = sts_backend.get_assumed_role_from_access_key(access_key_id)
if assumed_role:
user_id = assumed_role.user_id
arn = assumed_role.arn
user = iam_backend.get_user_from_access_key_id(access_key_id)
if user:
user_id = user.id
arn = user.arn
return template.render(account_id=ACCOUNT_ID, user_id=user_id, arn=arn)
GET_SESSION_TOKEN_RESPONSE = """<GetSessionTokenResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
@ -99,8 +117,8 @@ GET_FEDERATION_TOKEN_RESPONSE = """<GetFederationTokenResponse xmlns="https://st
<AccessKeyId>AKIAIOSFODNN7EXAMPLE</AccessKeyId>
</Credentials>
<FederatedUser>
<Arn>arn:aws:sts::123456789012:federated-user/{{ token.name }}</Arn>
<FederatedUserId>123456789012:{{ token.name }}</FederatedUserId>
<Arn>arn:aws:sts::{{ account_id }}:federated-user/{{ token.name }}</Arn>
<FederatedUserId>{{ account_id }}:{{ token.name }}</FederatedUserId>
</FederatedUser>
<PackedPolicySize>6</PackedPolicySize>
</GetFederationTokenResult>
@ -121,7 +139,7 @@ ASSUME_ROLE_RESPONSE = """<AssumeRoleResponse xmlns="https://sts.amazonaws.com/d
</Credentials>
<AssumedRoleUser>
<Arn>{{ role.arn }}</Arn>
<AssumedRoleId>ARO123EXAMPLE123:{{ role.session_name }}</AssumedRoleId>
<AssumedRoleId>{{ role.user_id }}</AssumedRoleId>
</AssumedRoleUser>
<PackedPolicySize>6</PackedPolicySize>
</AssumeRoleResult>
@ -153,9 +171,9 @@ ASSUME_ROLE_WITH_WEB_IDENTITY_RESPONSE = """<AssumeRoleWithWebIdentityResponse x
GET_CALLER_IDENTITY_RESPONSE = """<GetCallerIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
<GetCallerIdentityResult>
<Arn>arn:aws:sts::123456789012:user/moto</Arn>
<UserId>AKIAIOSFODNN7EXAMPLE</UserId>
<Account>123456789012</Account>
<Arn>{{ arn }}</Arn>
<UserId>{{ user_id }}</UserId>
<Account>{{ account_id }}</Account>
</GetCallerIdentityResult>
<ResponseMetadata>
<RequestId>c6104cbe-af31-11e0-8154-cbc7ccf896c7</RequestId>

View File

@ -6,15 +6,12 @@ import string
import six
ACCOUNT_SPECIFIC_ACCESS_KEY_PREFIX = "8NWMTLYQ"
ACCOUNT_SPECIFIC_ASSUMED_ROLE_ID_PREFIX = "3X42LBCD"
SESSION_TOKEN_PREFIX = "FQoGZXIvYXdzEBYaD"
def random_access_key_id():
return ACCOUNT_SPECIFIC_ACCESS_KEY_PREFIX + ''.join(six.text_type(
random.choice(
string.ascii_uppercase + string.digits
)) for _ in range(8)
)
return ACCOUNT_SPECIFIC_ACCESS_KEY_PREFIX + _random_uppercase_or_digit_sequence(8)
def random_secret_access_key():
@ -23,3 +20,16 @@ def random_secret_access_key():
def random_session_token():
return SESSION_TOKEN_PREFIX + base64.b64encode(os.urandom(266))[len(SESSION_TOKEN_PREFIX):].decode()
def random_assumed_role_id():
return ACCOUNT_SPECIFIC_ASSUMED_ROLE_ID_PREFIX + _random_uppercase_or_digit_sequence(9)
def _random_uppercase_or_digit_sequence(length):
return ''.join(
six.text_type(
random.choice(
string.ascii_uppercase + string.digits
)) for _ in range(length)
)

View File

@ -1,6 +1,7 @@
from __future__ import unicode_literals
import base64
import uuid
import botocore.client
import boto3
import hashlib
@ -11,11 +12,12 @@ import zipfile
import sure # noqa
from freezegun import freeze_time
from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings
from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings, mock_sqs
from nose.tools import assert_raises
from botocore.exceptions import ClientError
_lambda_region = 'us-west-2'
boto3.setup_default_session(region_name=_lambda_region)
def _process_lambda(func_str):
@ -59,6 +61,13 @@ def lambda_handler(event, context):
"""
return _process_lambda(pfunc)
def get_test_zip_file4():
pfunc = """
def lambda_handler(event, context):
raise Exception('I failed!')
"""
return _process_lambda(pfunc)
@mock_lambda
def test_list_functions():
@ -933,3 +942,306 @@ def test_list_versions_by_function_for_nonexistent_function():
versions = conn.list_versions_by_function(FunctionName='testFunction')
assert len(versions['Versions']) == 0
@mock_logs
@mock_lambda
@mock_sqs
def test_create_event_source_mapping():
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func['FunctionArn'],
)
assert response['EventSourceArn'] == queue.attributes['QueueArn']
assert response['FunctionArn'] == func['FunctionArn']
assert response['State'] == 'Enabled'
@mock_logs
@mock_lambda
@mock_sqs
def test_invoke_function_from_sqs():
logs_conn = boto3.client("logs")
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func['FunctionArn'],
)
assert response['EventSourceArn'] == queue.attributes['QueueArn']
assert response['State'] == 'Enabled'
sqs_client = boto3.client('sqs')
sqs_client.send_message(QueueUrl=queue.url, MessageBody='test')
start = time.time()
while (time.time() - start) < 30:
result = logs_conn.describe_log_streams(logGroupName='/aws/lambda/testFunction')
log_streams = result.get('logStreams')
if not log_streams:
time.sleep(1)
continue
assert len(log_streams) == 1
result = logs_conn.get_log_events(logGroupName='/aws/lambda/testFunction', logStreamName=log_streams[0]['logStreamName'])
for event in result.get('events'):
if event['message'] == 'get_test_zip_file3 success':
return
time.sleep(1)
assert False, "Test Failed"
@mock_logs
@mock_lambda
@mock_sqs
def test_invoke_function_from_sqs_exception():
logs_conn = boto3.client("logs")
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file4(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func['FunctionArn'],
)
assert response['EventSourceArn'] == queue.attributes['QueueArn']
assert response['State'] == 'Enabled'
entries = []
for i in range(3):
body = {
"uuid": str(uuid.uuid4()),
"test": "test_{}".format(i),
}
entry = {
'Id': str(i),
'MessageBody': json.dumps(body)
}
entries.append(entry)
queue.send_messages(Entries=entries)
start = time.time()
while (time.time() - start) < 30:
result = logs_conn.describe_log_streams(logGroupName='/aws/lambda/testFunction')
log_streams = result.get('logStreams')
if not log_streams:
time.sleep(1)
continue
assert len(log_streams) >= 1
result = logs_conn.get_log_events(logGroupName='/aws/lambda/testFunction', logStreamName=log_streams[0]['logStreamName'])
for event in result.get('events'):
if 'I failed!' in event['message']:
messages = queue.receive_messages(MaxNumberOfMessages=10)
# Verify messages are still visible and unprocessed
assert len(messages) == 3
return
time.sleep(1)
assert False, "Test Failed"
@mock_logs
@mock_lambda
@mock_sqs
def test_list_event_source_mappings():
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func['FunctionArn'],
)
mappings = conn.list_event_source_mappings(EventSourceArn='123')
assert len(mappings['EventSourceMappings']) == 0
mappings = conn.list_event_source_mappings(EventSourceArn=queue.attributes['QueueArn'])
assert len(mappings['EventSourceMappings']) == 1
assert mappings['EventSourceMappings'][0]['UUID'] == response['UUID']
assert mappings['EventSourceMappings'][0]['FunctionArn'] == func['FunctionArn']
@mock_lambda
@mock_sqs
def test_get_event_source_mapping():
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func['FunctionArn'],
)
mapping = conn.get_event_source_mapping(UUID=response['UUID'])
assert mapping['UUID'] == response['UUID']
assert mapping['FunctionArn'] == func['FunctionArn']
conn.get_event_source_mapping.when.called_with(UUID='1')\
.should.throw(botocore.client.ClientError)
@mock_lambda
@mock_sqs
def test_update_event_source_mapping():
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func1 = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
func2 = conn.create_function(
FunctionName='testFunction2',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func1['FunctionArn'],
)
assert response['FunctionArn'] == func1['FunctionArn']
assert response['BatchSize'] == 10
assert response['State'] == 'Enabled'
mapping = conn.update_event_source_mapping(
UUID=response['UUID'],
Enabled=False,
BatchSize=15,
FunctionName='testFunction2'
)
assert mapping['UUID'] == response['UUID']
assert mapping['FunctionArn'] == func2['FunctionArn']
assert mapping['State'] == 'Disabled'
@mock_lambda
@mock_sqs
def test_delete_event_source_mapping():
sqs = boto3.resource('sqs')
queue = sqs.create_queue(QueueName="test-sqs-queue1")
conn = boto3.client('lambda')
func1 = conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
Handler='lambda_function.lambda_handler',
Code={
'ZipFile': get_test_zip_file3(),
},
Description='test lambda function',
Timeout=3,
MemorySize=128,
Publish=True,
)
response = conn.create_event_source_mapping(
EventSourceArn=queue.attributes['QueueArn'],
FunctionName=func1['FunctionArn'],
)
assert response['FunctionArn'] == func1['FunctionArn']
assert response['BatchSize'] == 10
assert response['State'] == 'Enabled'
response = conn.delete_event_source_mapping(UUID=response['UUID'])
assert response['State'] == 'Deleting'
conn.get_event_source_mapping.when.called_with(UUID=response['UUID'])\
.should.throw(botocore.client.ClientError)

View File

@ -593,9 +593,11 @@ def test_create_stack_lambda_and_dynamodb():
}
},
"func1version": {
"Type": "AWS::Lambda::LambdaVersion",
"Properties" : {
"Version": "v1.2.3"
"Type": "AWS::Lambda::Version",
"Properties": {
"FunctionName": {
"Ref": "func1"
}
}
},
"tab1": {
@ -618,8 +620,10 @@ def test_create_stack_lambda_and_dynamodb():
},
"func1mapping": {
"Type": "AWS::Lambda::EventSourceMapping",
"Properties" : {
"FunctionName": "v1.2.3",
"Properties": {
"FunctionName": {
"Ref": "func1"
},
"EventSourceArn": "arn:aws:dynamodb:region:XXXXXX:table/tab1/stream/2000T00:00:00.000",
"StartingPosition": "0",
"BatchSize": 100,

View File

@ -273,6 +273,27 @@ def test_access_denied_with_denying_policy():
)
@set_initial_no_auth_action_count(3)
@mock_sts
def test_get_caller_identity_allowed_with_denying_policy():
user_name = 'test-user'
inline_policy_document = {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Deny",
"Action": "sts:GetCallerIdentity",
"Resource": "*"
}
]
}
access_key = create_user_with_access_key_and_inline_policy(user_name, inline_policy_document)
client = boto3.client('sts', region_name='us-east-1',
aws_access_key_id=access_key['AccessKeyId'],
aws_secret_access_key=access_key['SecretAccessKey'])
client.get_caller_identity().should.be.a(dict)
@set_initial_no_auth_action_count(3)
@mock_ec2
def test_allowed_with_wildcard_action():

View File

@ -2141,3 +2141,55 @@ def test_scan_by_non_exists_index():
ex.exception.response['Error']['Message'].should.equal(
'The table does not have the specified index: non_exists_index'
)
@mock_dynamodb2
def test_batch_items_returns_all():
dynamodb = _create_user_table()
returned_items = dynamodb.batch_get_item(RequestItems={
'users': {
'Keys': [{
'username': {'S': 'user0'}
}, {
'username': {'S': 'user1'}
}, {
'username': {'S': 'user2'}
}, {
'username': {'S': 'user3'}
}],
'ConsistentRead': True
}
})['Responses']['users']
assert len(returned_items) == 3
assert [item['username']['S'] for item in returned_items] == ['user1', 'user2', 'user3']
@mock_dynamodb2
def test_batch_items_should_throw_exception_for_duplicate_request():
client = _create_user_table()
with assert_raises(ClientError) as ex:
client.batch_get_item(RequestItems={
'users': {
'Keys': [{
'username': {'S': 'user0'}
}, {
'username': {'S': 'user0'}
}],
'ConsistentRead': True
}})
ex.exception.response['Error']['Code'].should.equal('ValidationException')
ex.exception.response['Error']['Message'].should.equal('Provided list of item keys contains duplicates')
def _create_user_table():
client = boto3.client('dynamodb', region_name='us-east-1')
client.create_table(
TableName='users',
KeySchema=[{'AttributeName': 'username', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'username', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5}
)
client.put_item(TableName='users', Item={'username': {'S': 'user1'}, 'foo': {'S': 'bar'}})
client.put_item(TableName='users', Item={'username': {'S': 'user2'}, 'foo': {'S': 'bar'}})
client.put_item(TableName='users', Item={'username': {'S': 'user3'}, 'foo': {'S': 'bar'}})
return client

View File

@ -8,7 +8,9 @@ from freezegun import freeze_time
from nose.tools import assert_raises
import sure # noqa
from moto import mock_sts, mock_sts_deprecated
from moto import mock_sts, mock_sts_deprecated, mock_iam, settings
from moto.iam.models import ACCOUNT_ID
from moto.sts.responses import MAX_FEDERATION_TOKEN_POLICY_LENGTH
@ -29,7 +31,8 @@ def test_get_session_token():
@mock_sts_deprecated
def test_get_federation_token():
conn = boto.connect_sts()
token = conn.get_federation_token(duration=123, name="Bob")
token_name = "Bob"
token = conn.get_federation_token(duration=123, name=token_name)
token.credentials.expiration.should.equal('2012-01-01T12:02:03.000Z')
token.credentials.session_token.should.equal(
@ -38,15 +41,17 @@ def test_get_federation_token():
token.credentials.secret_key.should.equal(
"wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY")
token.federated_user_arn.should.equal(
"arn:aws:sts::123456789012:federated-user/Bob")
token.federated_user_id.should.equal("123456789012:Bob")
"arn:aws:sts::{account_id}:federated-user/{token_name}".format(account_id=ACCOUNT_ID, token_name=token_name))
token.federated_user_id.should.equal(str(ACCOUNT_ID) + ":" + token_name)
@freeze_time("2012-01-01 12:00:00")
@mock_sts_deprecated
@mock_sts
def test_assume_role():
conn = boto.connect_sts()
client = boto3.client(
"sts", region_name='us-east-1')
session_name = "session-name"
policy = json.dumps({
"Statement": [
{
@ -61,20 +66,25 @@ def test_assume_role():
},
]
})
s3_role = "arn:aws:iam::123456789012:role/test-role"
role = conn.assume_role(s3_role, "session-name",
policy, duration_seconds=123)
role_name = "test-role"
s3_role = "arn:aws:iam::{account_id}:role/{role_name}".format(account_id=ACCOUNT_ID, role_name=role_name)
assume_role_response = client.assume_role(RoleArn=s3_role, RoleSessionName=session_name,
Policy=policy, DurationSeconds=900)
credentials = role.credentials
credentials.expiration.should.equal('2012-01-01T12:02:03.000Z')
credentials.session_token.should.have.length_of(356)
assert credentials.session_token.startswith("FQoGZXIvYXdzE")
credentials.access_key.should.have.length_of(20)
assert credentials.access_key.startswith("ASIA")
credentials.secret_key.should.have.length_of(40)
credentials = assume_role_response['Credentials']
if not settings.TEST_SERVER_MODE:
credentials['Expiration'].isoformat().should.equal('2012-01-01T12:15:00+00:00')
credentials['SessionToken'].should.have.length_of(356)
assert credentials['SessionToken'].startswith("FQoGZXIvYXdzE")
credentials['AccessKeyId'].should.have.length_of(20)
assert credentials['AccessKeyId'].startswith("ASIA")
credentials['SecretAccessKey'].should.have.length_of(40)
role.user.arn.should.equal("arn:aws:iam::123456789012:role/test-role")
role.user.assume_role_id.should.contain("session-name")
assume_role_response['AssumedRoleUser']['Arn'].should.equal("arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=ACCOUNT_ID, role_name=role_name, session_name=session_name))
assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].startswith("AROA")
assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].endswith(":" + session_name)
assume_role_response['AssumedRoleUser']['AssumedRoleId'].should.have.length_of(21 + 1 + len(session_name))
@freeze_time("2012-01-01 12:00:00")
@ -96,9 +106,11 @@ def test_assume_role_with_web_identity():
},
]
})
s3_role = "arn:aws:iam::123456789012:role/test-role"
role_name = "test-role"
s3_role = "arn:aws:iam::{account_id}:role/{role_name}".format(account_id=ACCOUNT_ID, role_name=role_name)
session_name = "session-name"
role = conn.assume_role_with_web_identity(
s3_role, "session-name", policy, duration_seconds=123)
s3_role, session_name, policy, duration_seconds=123)
credentials = role.credentials
credentials.expiration.should.equal('2012-01-01T12:02:03.000Z')
@ -108,18 +120,68 @@ def test_assume_role_with_web_identity():
assert credentials.access_key.startswith("ASIA")
credentials.secret_key.should.have.length_of(40)
role.user.arn.should.equal("arn:aws:iam::123456789012:role/test-role")
role.user.arn.should.equal("arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
account_id=ACCOUNT_ID, role_name=role_name, session_name=session_name))
role.user.assume_role_id.should.contain("session-name")
@mock_sts
def test_get_caller_identity():
def test_get_caller_identity_with_default_credentials():
identity = boto3.client(
"sts", region_name='us-east-1').get_caller_identity()
identity['Arn'].should.equal('arn:aws:sts::123456789012:user/moto')
identity['Arn'].should.equal('arn:aws:sts::{account_id}:user/moto'.format(account_id=ACCOUNT_ID))
identity['UserId'].should.equal('AKIAIOSFODNN7EXAMPLE')
identity['Account'].should.equal('123456789012')
identity['Account'].should.equal(str(ACCOUNT_ID))
@mock_sts
@mock_iam
def test_get_caller_identity_with_iam_user_credentials():
iam_client = boto3.client("iam", region_name='us-east-1')
iam_user_name = "new-user"
iam_user = iam_client.create_user(UserName=iam_user_name)['User']
access_key = iam_client.create_access_key(UserName=iam_user_name)['AccessKey']
identity = boto3.client(
"sts", region_name='us-east-1', aws_access_key_id=access_key['AccessKeyId'],
aws_secret_access_key=access_key['SecretAccessKey']).get_caller_identity()
identity['Arn'].should.equal(iam_user['Arn'])
identity['UserId'].should.equal(iam_user['UserId'])
identity['Account'].should.equal(str(ACCOUNT_ID))
@mock_sts
@mock_iam
def test_get_caller_identity_with_assumed_role_credentials():
iam_client = boto3.client("iam", region_name='us-east-1')
sts_client = boto3.client("sts", region_name='us-east-1')
iam_role_name = "new-user"
trust_policy_document = {
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Principal": {"AWS": "arn:aws:iam::{account_id}:root".format(account_id=ACCOUNT_ID)},
"Action": "sts:AssumeRole"
}
}
iam_role_arn = iam_client.create_role(
RoleName=iam_role_name,
AssumeRolePolicyDocument=json.dumps(trust_policy_document)
)['Role']['Arn']
session_name = "new-session"
assumed_role = sts_client.assume_role(RoleArn=iam_role_arn,
RoleSessionName=session_name)
access_key = assumed_role['Credentials']
identity = boto3.client(
"sts", region_name='us-east-1', aws_access_key_id=access_key['AccessKeyId'],
aws_secret_access_key=access_key['SecretAccessKey']).get_caller_identity()
identity['Arn'].should.equal(assumed_role['AssumedRoleUser']['Arn'])
identity['UserId'].should.equal(assumed_role['AssumedRoleUser']['AssumedRoleId'])
identity['Account'].should.equal(str(ACCOUNT_ID))
@mock_sts