Merge remote-tracking branch 'spulec/master'

commit f763de85fb
@@ -1906,7 +1906,7 @@
 - [X] delete_role
 - [X] delete_role_policy
 - [ ] delete_saml_provider
-- [ ] delete_server_certificate
+- [X] delete_server_certificate
 - [ ] delete_service_linked_role
 - [ ] delete_service_specific_credential
 - [ ] delete_signing_certificate
@@ -111,16 +111,16 @@ class AWSCertificateManagerResponse(BaseResponse):
         # actual data
         try:
             certificate = base64.standard_b64decode(certificate)
-        except:
+        except Exception:
             return AWSValidationException('The certificate is not PEM-encoded or is not valid.').response()
         try:
             private_key = base64.standard_b64decode(private_key)
-        except:
+        except Exception:
             return AWSValidationException('The private key is not PEM-encoded or is not valid.').response()
         if chain is not None:
             try:
                 chain = base64.standard_b64decode(chain)
-            except:
+            except Exception:
                 return AWSValidationException('The certificate chain is not PEM-encoded or is not valid.').response()

         try:
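The recurring `except:` to `except Exception:` change in this merge narrows the handlers so they stop swallowing process-control exceptions. A minimal standalone sketch of the difference (not part of the diff):

    # A bare `except:` catches BaseException, which includes
    # KeyboardInterrupt and SystemExit; `except Exception:` lets those
    # propagate, so Ctrl-C still interrupts the process.
    def classify(exc):
        try:
            raise exc
        except Exception:
            return 'handled'
        except BaseException:
            return 'only a bare except would swallow this'

    print(classify(ValueError()))         # handled
    print(classify(KeyboardInterrupt()))  # only a bare except would swallow this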
@@ -265,14 +265,14 @@ class LambdaFunction(BaseModel):
     def convert(s):
         try:
             return str(s, encoding='utf-8')
-        except:
+        except Exception:
             return s

     @staticmethod
     def is_json(test_str):
         try:
             response = json.loads(test_str)
-        except:
+        except Exception:
             response = test_str
         return response

@@ -4,7 +4,7 @@ import json

 try:
     from urllib import unquote
-except:
+except ImportError:
     from urllib.parse import unquote

 from moto.core.utils import amz_crc32, amzn_request_id
@@ -453,8 +453,8 @@ class ResourceMap(collections.Mapping):
                 resource_name, resource_json, self, self._region_name)
             self._parsed_resources[resource_name] = new_resource

-        removed_resource_nams = set(old_template) - set(new_template)
-        for resource_name in removed_resource_nams:
+        removed_resource_names = set(old_template) - set(new_template)
+        for resource_name in removed_resource_names:
             resource_json = old_template[resource_name]
             parse_and_delete_resource(
                 resource_name, resource_json, self, self._region_name)
@@ -161,11 +161,15 @@ class CloudFormationResponse(BaseResponse):
     def update_stack(self):
         stack_name = self._get_param('StackName')
         role_arn = self._get_param('RoleARN')
+        template_url = self._get_param('TemplateURL')
         if self._get_param('UsePreviousTemplate') == "true":
             stack_body = self.cloudformation_backend.get_stack(
                 stack_name).template
+        elif template_url:
+            stack_body = self._get_stack_from_s3_url(template_url)
         else:
             stack_body = self._get_param('TemplateBody')

         parameters = dict([
             (parameter['parameter_key'], parameter['parameter_value'])
             for parameter
@@ -10,6 +10,8 @@ from .utils import make_arn_for_dashboard

 DEFAULT_ACCOUNT_ID = 123456789012

+_EMPTY_LIST = tuple()
+

 class Dimension(object):
@@ -146,14 +148,14 @@ class Statistics:
         return sum(self.values)

     @property
-    def min(self):
+    def minimum(self):
         if 'Minimum' not in self.stats:
             return None

         return min(self.values)

     @property
-    def max(self):
+    def maximum(self):
         if 'Maximum' not in self.stats:
             return None

@@ -228,7 +230,7 @@ class CloudWatchBackend(BaseBackend):
     def put_metric_data(self, namespace, metric_data):
         for metric_member in metric_data:
             self.metric_data.append(MetricDatum(
-                namespace, metric_member['MetricName'], float(metric_member['Value']), metric_member['Dimensions.member'], metric_member.get('Timestamp')))
+                namespace, metric_member['MetricName'], float(metric_member['Value']), metric_member.get('Dimensions.member', _EMPTY_LIST), metric_member.get('Timestamp')))

     def get_metric_statistics(self, namespace, metric_name, start_time, end_time, period, stats):
         period_delta = timedelta(seconds=period)
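Posting metric data without dimensions used to raise `KeyError: 'Dimensions.member'`; the switch to `.get` with a module-level `_EMPTY_LIST` default makes the field optional. A small sketch of the idiom (the sample dict is illustrative):

    _EMPTY_LIST = tuple()
    metric_member = {'MetricName': 'metric', 'Value': '1.5'}  # no dimensions posted

    # metric_member['Dimensions.member'] would raise KeyError here
    dimensions = metric_member.get('Dimensions.member', _EMPTY_LIST)
    print(dimensions)  # ()

An immutable tuple is a safe shared default: a single module-level list could be mutated by one caller and leak into every later datum.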
@@ -276,27 +276,27 @@ GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://m
   <Datapoints>
     {% for datapoint in datapoints %}
     <Datapoint>
-      {% if datapoint.sum %}
+      {% if datapoint.sum is not none %}
       <Sum>{{ datapoint.sum }}</Sum>
       {% endif %}

-      {% if datapoint.average %}
+      {% if datapoint.average is not none %}
      <Average>{{ datapoint.average }}</Average>
      {% endif %}

-      {% if datapoint.maximum %}
+      {% if datapoint.maximum is not none %}
      <Maximum>{{ datapoint.maximum }}</Maximum>
      {% endif %}

-      {% if datapoint.minimum %}
+      {% if datapoint.minimum is not none %}
      <Minimum>{{ datapoint.minimum }}</Minimum>
      {% endif %}

-      {% if datapoint.sample_count %}
+      {% if datapoint.sample_count is not none %}
      <SampleCount>{{ datapoint.sample_count }}</SampleCount>
      {% endif %}

-      {% if datapoint.extended_statistics %}
+      {% if datapoint.extended_statistics is not none %}
      <ExtendedStatistics>{{ datapoint.extended_statistics }}</ExtendedStatistics>
      {% endif %}

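The `is not none` tests matter because Jinja2's bare truth test treats the float `0.0` as false, so a legitimate zero-valued statistic would vanish from the XML. A standalone sketch (requires jinja2; not part of the diff):

    from jinja2 import Template

    truthy = Template('{% if v %}<Sum>{{ v }}</Sum>{% endif %}')
    explicit = Template('{% if v is not none %}<Sum>{{ v }}</Sum>{% endif %}')

    print(truthy.render(v=0.0))     # '' - the zero datapoint is dropped
    print(explicit.render(v=0.0))   # <Sum>0.0</Sum>
    print(explicit.render(v=None))  # '' - only a truly absent stat is omitted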
@@ -4,6 +4,7 @@ import copy
 import itertools
 import ipaddress
 import json
+import os
 import re
 import six
 import warnings
@@ -117,7 +118,8 @@ INSTANCE_TYPES = json.load(
     open(resource_filename(__name__, 'resources/instance_types.json'), 'r')
 )
 AMIS = json.load(
-    open(resource_filename(__name__, 'resources/amis.json'), 'r')
+    open(os.environ.get('MOTO_AMIS_PATH') or resource_filename(
+        __name__, 'resources/amis.json'), 'r')
 )

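With this change the bundled AMI fixtures can be overridden at import time. A usage sketch (the path and the minimal record are illustrative; real entries need the full set of fields the backend reads):

    import json
    import os

    with open('/tmp/my_amis.json', 'w') as f:
        json.dump([{'ami_id': 'ami-12345678', 'state': 'available'}], f)

    # Must be set before moto.ec2.models is first imported, since AMIS is
    # populated by a module-level json.load.
    os.environ['MOTO_AMIS_PATH'] = '/tmp/my_amis.json'
    import moto.ec2.models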
@@ -392,7 +394,9 @@ class Instance(TaggedEC2Resource, BotoInstance):
         if ami is None:
             warnings.warn('Could not find AMI with image-id:{0}, '
                           'in the near future this will '
-                          'cause an error'.format(image_id),
+                          'cause an error.\n'
+                          'Use ec2_backend.describe_images() to'
+                          'find suitable image for your test'.format(image_id),
                           PendingDeprecationWarning)

         self.platform = ami.platform if ami else None
@@ -505,6 +509,22 @@ class Instance(TaggedEC2Resource, BotoInstance):
             instance.add_tag(tag["Key"], tag["Value"])
         return instance

+    @classmethod
+    def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
+        ec2_backend = ec2_backends[region_name]
+        all_instances = ec2_backend.all_instances()
+
+        # the resource_name for instances is the stack name, logical id, and random suffix separated
+        # by hyphens. So to lookup the instances using the 'aws:cloudformation:logical-id' tag, we need
+        # to extract the logical-id from the resource_name
+        logical_id = resource_name.split('-')[1]
+
+        for instance in all_instances:
+            instance_tags = instance.get_tags()
+            for tag in instance_tags:
+                if tag['key'] == 'aws:cloudformation:logical-id' and tag['value'] == logical_id:
+                    instance.delete(region_name)
+
     @property
     def physical_resource_id(self):
         return self.id
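A quick sketch of the naming convention the new comment describes (the names are invented): CloudFormation physical resource names look like `<stack>-<logical id>-<random suffix>`, so index 1 of the hyphen split is the logical id:

    resource_name = 'teststack-Ec2Instance1-CT0NXXYK635A'
    logical_id = resource_name.split('-')[1]
    print(logical_id)  # Ec2Instance1

Note the indexing assumes the stack name itself contains no hyphens; a hyphenated stack name would shift the logical id to a later slot.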
@@ -3702,6 +3722,7 @@ class NatGateway(object):
 class NatGatewayBackend(object):
     def __init__(self):
         self.nat_gateways = {}
+        super(NatGatewayBackend, self).__init__()

     def get_all_nat_gateways(self, filters):
         return self.nat_gateways.values()
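moto composes its region backends out of many `*Backend` mixins, so each `__init__` forwarding to `super()` keeps the whole chain running. A standalone sketch (class composition simplified, names invented):

    class VpcBackend(object):
        def __init__(self):
            self.vpcs = {}
            super(VpcBackend, self).__init__()

    class NatGatewayBackend(object):
        def __init__(self):
            self.nat_gateways = {}
            super(NatGatewayBackend, self).__init__()

    class CombinedBackend(VpcBackend, NatGatewayBackend):
        pass

    # Both mixins run; a mixin that skips super().__init__() would cut the
    # MRO chain short and leave later attributes undefined.
    backend = CombinedBackend()
    print(backend.vpcs, backend.nat_gateways)  # {} {}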
@@ -4,6 +4,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -20,6 +21,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -36,6 +38,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -52,6 +55,7 @@
     "state": "available",
     "public": true,
     "owner_id": "099720109477",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -68,6 +72,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -84,6 +89,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -100,6 +106,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -116,6 +123,7 @@
     "state": "available",
     "public": true,
     "owner_id": "013907871322",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -132,6 +140,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -148,6 +157,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -163,6 +173,7 @@
     "ami_id": "ami-56ec3e2f",
     "state": "available",
     "public": true,
+    "image_location": "amazon/getting-started",
     "owner_id": "801119661308",
     "sriov": "simple",
     "root_device_type": "ebs",
@@ -180,6 +191,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -196,6 +208,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -212,6 +225,7 @@
     "state": "available",
     "public": true,
     "owner_id": "137112412989",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/xvda",
@@ -228,6 +242,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -244,6 +259,7 @@
     "state": "available",
     "public": true,
     "owner_id": "099720109477",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -260,6 +276,7 @@
     "state": "available",
     "public": true,
     "owner_id": "137112412989",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -276,6 +293,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -292,6 +310,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -308,6 +327,7 @@
     "state": "available",
     "public": true,
     "owner_id": "898082745236",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/xvda",
@@ -324,6 +344,7 @@
     "state": "available",
     "public": true,
     "owner_id": "898082745236",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -340,6 +361,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -356,6 +378,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -372,6 +395,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -388,6 +412,7 @@
     "state": "available",
     "public": true,
     "owner_id": "309956199498",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -404,6 +429,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -420,6 +446,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -436,6 +463,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -452,6 +480,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -468,6 +497,7 @@
     "state": "available",
     "public": true,
     "owner_id": "898082745236",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -484,6 +514,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -500,6 +531,7 @@
     "state": "available",
     "public": true,
     "owner_id": "801119661308",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda1",
@@ -516,6 +548,7 @@
     "state": "available",
     "public": true,
     "owner_id": "898082745236",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/xvda",
@@ -532,6 +565,7 @@
     "state": "available",
     "public": true,
     "owner_id": "013907871322",
+    "image_location": "amazon/getting-started",
     "sriov": "simple",
     "root_device_type": "ebs",
     "root_device_name": "/dev/sda",
@@ -663,6 +663,20 @@ class IAMBackend(BaseBackend):
                 "The Server Certificate with name {0} cannot be "
                 "found.".format(name))

+    def delete_server_certificate(self, name):
+        cert_id = None
+        for key, cert in self.certificates.items():
+            if name == cert.cert_name:
+                cert_id = key
+                break
+
+        if cert_id is None:
+            raise IAMNotFoundException(
+                "The Server Certificate with name {0} cannot be "
+                "found.".format(name))
+
+        self.certificates.pop(cert_id, None)
+
     def create_group(self, group_name, path='/'):
         if group_name in self.groups:
             raise IAMConflictException(
@@ -271,6 +271,12 @@ class IamResponse(BaseResponse):
         template = self.response_template(GET_SERVER_CERTIFICATE_TEMPLATE)
         return template.render(certificate=cert)

+    def delete_server_certificate(self):
+        cert_name = self._get_param('ServerCertificateName')
+        iam_backend.delete_server_certificate(cert_name)
+        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
+        return template.render(name="DeleteServerCertificate")
+
     def create_group(self):
         group_name = self._get_param('GroupName')
         path = self._get_param('Path')
@@ -12,6 +12,7 @@ from hashlib import md5

 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel
+from moto.core.utils import unix_time
 from .exceptions import StreamNotFoundError, ShardNotFoundError, ResourceInUseError, \
     ResourceNotFoundError, InvalidArgumentError
 from .utils import compose_shard_iterator, compose_new_shard_iterator, decompose_shard_iterator
@@ -24,6 +25,7 @@ class Record(BaseModel):
         self.data = data
         self.sequence_number = sequence_number
         self.explicit_hash_key = explicit_hash_key
+        self.create_at = unix_time()

     def to_json(self):
         return {
@@ -80,6 +82,15 @@ class Shard(BaseModel):
             return list(self.records.keys())[-1]
         return 0

+    def get_sequence_number_at(self, at_timestamp):
+        if not self.records or at_timestamp < list(self.records.values())[0].create_at:
+            return 0
+        else:
+            # find the last item in the list that was created before
+            # at_timestamp
+            r = next((r for r in reversed(self.records.values()) if r.create_at < at_timestamp), None)
+            return r.sequence_number
+
     def to_json(self):
         return {
             "HashKeyRange": {
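The lookup relies on `next` over a generator with a default: scanning a reversed sequence returns the last element satisfying the predicate. A standalone sketch with a simplified record type (not moto's actual Record class):

    import collections

    Record = collections.namedtuple('Record', 'sequence_number create_at')
    records = [Record(1, 10.0), Record(2, 20.0), Record(3, 30.0)]

    # last record created strictly before t=25.0
    r = next((x for x in reversed(records) if x.create_at < 25.0), None)
    print(r.sequence_number)  # 2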
@@ -300,13 +311,14 @@ class KinesisBackend(BaseBackend):
             return self.streams.pop(stream_name)
         raise StreamNotFoundError(stream_name)

-    def get_shard_iterator(self, stream_name, shard_id, shard_iterator_type, starting_sequence_number):
+    def get_shard_iterator(self, stream_name, shard_id, shard_iterator_type, starting_sequence_number,
+                           at_timestamp):
         # Validate params
         stream = self.describe_stream(stream_name)
         shard = stream.get_shard(shard_id)

         shard_iterator = compose_new_shard_iterator(
-            stream_name, shard, shard_iterator_type, starting_sequence_number
+            stream_name, shard, shard_iterator_type, starting_sequence_number, at_timestamp
         )
         return shard_iterator

@@ -66,9 +66,10 @@ class KinesisResponse(BaseResponse):
         shard_iterator_type = self.parameters.get("ShardIteratorType")
         starting_sequence_number = self.parameters.get(
             "StartingSequenceNumber")
+        at_timestamp = self.parameters.get("Timestamp")

         shard_iterator = self.kinesis_backend.get_shard_iterator(
-            stream_name, shard_id, shard_iterator_type, starting_sequence_number,
+            stream_name, shard_id, shard_iterator_type, starting_sequence_number, at_timestamp
         )

         return json.dumps({
@@ -3,7 +3,8 @@ import base64
 from .exceptions import InvalidArgumentError


-def compose_new_shard_iterator(stream_name, shard, shard_iterator_type, starting_sequence_number):
+def compose_new_shard_iterator(stream_name, shard, shard_iterator_type, starting_sequence_number,
+                               at_timestamp):
     if shard_iterator_type == "AT_SEQUENCE_NUMBER":
         last_sequence_id = int(starting_sequence_number) - 1
     elif shard_iterator_type == "AFTER_SEQUENCE_NUMBER":
@@ -12,6 +13,8 @@ def compose_new_shard_iterator(stream_name, shard, shard_iterator_type, starting
         last_sequence_id = 0
     elif shard_iterator_type == "LATEST":
         last_sequence_id = shard.get_max_sequence_number()
+    elif shard_iterator_type == "AT_TIMESTAMP":
+        last_sequence_id = shard.get_sequence_number_at(at_timestamp)
     else:
         raise InvalidArgumentError(
             "Invalid ShardIteratorType: {0}".format(shard_iterator_type))
@@ -3,12 +3,12 @@ from six.moves.urllib.parse import urlparse


 def bucket_name_from_url(url):
-    pth = urlparse(url).path.lstrip("/")
+    path = urlparse(url).path.lstrip("/")

-    l = pth.lstrip("/").split("/")
-    if len(l) == 0 or l[0] == "":
+    parts = path.lstrip("/").split("/")
+    if len(parts) == 0 or parts[0] == "":
         return None
-    return l[0]
+    return parts[0]


 def parse_key_name(path):
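Beyond the rename away from the lint-unfriendly single-letter `l`, behaviour is unchanged: the helper pulls the first path segment out of a path-style URL. A self-contained sketch of what it returns (using stdlib urlparse rather than six):

    from urllib.parse import urlparse

    def bucket_name_from_url(url):
        path = urlparse(url).path.lstrip('/')
        parts = path.split('/')
        if len(parts) == 0 or parts[0] == '':
            return None
        return parts[0]

    print(bucket_name_from_url('http://localhost:5000/mybucket/mykey'))  # mybucket
    print(bucket_name_from_url('http://localhost:5000/'))                # None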
@@ -3,7 +3,7 @@ mock
 nose
 sure==1.2.24
 coverage
-flake8==3.4.1
+flake8==3.5.0
 freezegun
 flask
 boto>=2.45.0
@@ -1,18 +1,13 @@
 from __future__ import unicode_literals

-import boto3
-import boto
-import boto.s3
-import boto.s3.key
-from botocore.exceptions import ClientError
-from moto import mock_cloudformation, mock_s3, mock_sqs
-
 import json
 import sure  # noqa
+
+import boto3
+from botocore.exceptions import ClientError
 # Ensure 'assert_raises' context manager support for Python 2.6
 import tests.backport_assert_raises  # noqa
 from nose.tools import assert_raises
 import random

+from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2
+
 dummy_template = {
     "AWSTemplateFormatVersion": "2010-09-09",
@@ -39,7 +34,6 @@ dummy_template = {
     }
 }

-
 dummy_template_yaml = """---
 AWSTemplateFormatVersion: 2010-09-09
 Description: Stack1 with yaml template
@@ -57,7 +51,6 @@ Resources:
       Value: Name tag for tests
 """

-
 dummy_template_yaml_with_short_form_func = """---
 AWSTemplateFormatVersion: 2010-09-09
 Description: Stack1 with yaml template
@@ -75,7 +68,6 @@ Resources:
       Value: Name tag for tests
 """

-
 dummy_template_yaml_with_ref = """---
 AWSTemplateFormatVersion: 2010-09-09
 Description: Stack1 with yaml template
@@ -100,7 +92,6 @@ Resources:
       Value: !Ref TagName
 """

-
 dummy_update_template = {
     "AWSTemplateFormatVersion": "2010-09-09",
     "Parameters": {
@@ -131,12 +122,12 @@ dummy_output_template = {
             }
         }
     },
-    "Outputs" : {
-        "StackVPC" : {
-            "Description" : "The ID of the VPC",
-            "Value" : "VPCID",
-            "Export" : {
-                "Name" : "My VPC ID"
+    "Outputs": {
+        "StackVPC": {
+            "Description": "The ID of the VPC",
+            "Value": "VPCID",
+            "Export": {
+                "Name": "My VPC ID"
             }
         }
     }
@@ -156,7 +147,7 @@ dummy_import_template = {
 }

 dummy_template_json = json.dumps(dummy_template)
-dummy_update_template_json = json.dumps(dummy_template)
+dummy_update_template_json = json.dumps(dummy_update_template)
 dummy_output_template_json = json.dumps(dummy_output_template)
 dummy_import_template_json = json.dumps(dummy_import_template)

@@ -172,6 +163,7 @@ def test_boto3_create_stack():
     cf_conn.get_template(StackName="test_stack")['TemplateBody'].should.equal(
         dummy_template)

+
 @mock_cloudformation
 def test_boto3_create_stack_with_yaml():
     cf_conn = boto3.client('cloudformation', region_name='us-east-1')
@@ -283,6 +275,41 @@ def test_create_stack_from_s3_url():
         'TemplateBody'].should.equal(dummy_template)


+@mock_cloudformation
+@mock_s3
+@mock_ec2
+def test_update_stack_from_s3_url():
+    s3 = boto3.client('s3')
+    s3_conn = boto3.resource('s3')
+
+    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
+    cf_conn.create_stack(
+        StackName="update_stack_from_url",
+        TemplateBody=dummy_template_json,
+        Tags=[{'Key': 'foo', 'Value': 'bar'}],
+    )
+
+    s3_conn.create_bucket(Bucket="foobar")
+
+    s3_conn.Object(
+        'foobar', 'template-key').put(Body=dummy_update_template_json)
+    key_url = s3.generate_presigned_url(
+        ClientMethod='get_object',
+        Params={
+            'Bucket': 'foobar',
+            'Key': 'template-key'
+        }
+    )
+
+    cf_conn.update_stack(
+        StackName="update_stack_from_url",
+        TemplateURL=key_url,
+    )
+
+    cf_conn.get_template(StackName="update_stack_from_url")[
+        'TemplateBody'].should.equal(dummy_update_template)
+
+
 @mock_cloudformation
 def test_describe_stack_pagination():
     conn = boto3.client('cloudformation', region_name='us-east-1')
@@ -382,6 +409,7 @@ def test_delete_stack_from_resource():


 @mock_cloudformation
+@mock_ec2
 def test_delete_stack_by_name():
     cf_conn = boto3.client('cloudformation', region_name='us-east-1')
     cf_conn.create_stack(
@@ -412,6 +440,7 @@ def test_describe_deleted_stack():


 @mock_cloudformation
+@mock_ec2
 def test_describe_updated_stack():
     cf_conn = boto3.client('cloudformation', region_name='us-east-1')
     cf_conn.create_stack(
@@ -502,6 +531,7 @@ def test_stack_tags():


 @mock_cloudformation
+@mock_ec2
 def test_stack_events():
     cf = boto3.resource('cloudformation', region_name='us-east-1')
     stack = cf.create_stack(
@@ -617,6 +647,7 @@ def test_export_names_must_be_unique():
         TemplateBody=dummy_output_template_json,
     )


+@mock_sqs
 @mock_cloudformation
 def test_stack_with_imports():
@@ -1,5 +1,8 @@
 import boto
 from boto.ec2.cloudwatch.alarm import MetricAlarm
+import boto3
+from datetime import datetime, timedelta
+import pytz
 import sure  # noqa

 from moto import mock_cloudwatch_deprecated
@@ -2,6 +2,8 @@ from __future__ import unicode_literals

 import boto3
 from botocore.exceptions import ClientError
+from datetime import datetime, timedelta
+import pytz
 import sure  # noqa

 from moto import mock_cloudwatch
@@ -137,6 +139,52 @@ def test_alarm_state():
     len(resp['MetricAlarms']).should.equal(2)


+@mock_cloudwatch
+def test_put_metric_data_no_dimensions():
+    conn = boto3.client('cloudwatch', region_name='us-east-1')
+
+    conn.put_metric_data(
+        Namespace='tester',
+        MetricData=[
+            dict(
+                MetricName='metric',
+                Value=1.5,
+            )
+        ]
+    )
+
+    metrics = conn.list_metrics()['Metrics']
+    metrics.should.have.length_of(1)
+    metric = metrics[0]
+    metric['Namespace'].should.equal('tester')
+    metric['MetricName'].should.equal('metric')
+
+
+@mock_cloudwatch
+def test_get_metric_statistics():
+    conn = boto3.client('cloudwatch', region_name='us-east-1')
+    utc_now = datetime.now(tz=pytz.utc)
+
+    conn.put_metric_data(
+        Namespace='tester',
+        MetricData=[
+            dict(
+                MetricName='metric',
+                Value=1.5,
+            )
+        ]
+    )
+
+    stats = conn.get_metric_statistics(
+        Namespace='tester',
+        MetricName='metric',
+        StartTime=utc_now,
+        EndTime=utc_now + timedelta(seconds=60),
+        Period=60,
+        Statistics=['SampleCount', 'Sum']
+    )
+
+    stats['Datapoints'].should.have.length_of(1)
+    datapoint = stats['Datapoints'][0]
+    datapoint['SampleCount'].should.equal(1.0)
+    datapoint['Sum'].should.equal(1.5)
@@ -58,6 +58,19 @@ def test_upload_server_cert():
         "arn:aws:iam::123456789012:server-certificate/certname")


+@mock_iam_deprecated()
+def test_delete_server_cert():
+    conn = boto.connect_iam()
+
+    conn.upload_server_cert("certname", "certbody", "privatekey")
+    conn.get_server_certificate("certname")
+    conn.delete_server_cert("certname")
+    with assert_raises(BotoServerError):
+        conn.get_server_certificate("certname")
+    with assert_raises(BotoServerError):
+        conn.delete_server_cert("certname")
+
+
 @mock_iam_deprecated()
 @raises(BotoServerError)
 def test_get_role__should_throw__when_role_does_not_exist():
@@ -4,6 +4,8 @@ import boto.kinesis
 from boto.kinesis.exceptions import ResourceNotFoundException, InvalidArgumentException
 import boto3
 import sure  # noqa
+import datetime
+import time

 from moto import mock_kinesis, mock_kinesis_deprecated

@@ -262,6 +264,129 @@ def test_get_records_latest():
     response['Records'][0]['Data'].should.equal('last_record')


+@mock_kinesis
+def test_get_records_at_timestamp():
+    # AT_TIMESTAMP - Read the first record at or after the specified timestamp
+    conn = boto3.client('kinesis', region_name="us-west-2")
+    stream_name = "my_stream"
+    conn.create_stream(StreamName=stream_name, ShardCount=1)
+
+    # Create some data
+    for index in range(1, 5):
+        conn.put_record(StreamName=stream_name,
+                        Data=str(index),
+                        PartitionKey=str(index))
+
+    # boto3 floors the timestamp that we pass to get_shard_iterator to
+    # second precision even though AWS supports ms precision:
+    # http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html
+    # To test around this limitation we wait until we are well into the next
+    # second before capturing the time and storing the records we expect to
+    # retrieve.
+    time.sleep(1.0)
+    timestamp = datetime.datetime.utcnow()
+
+    keys = [str(i) for i in range(5, 10)]
+    for k in keys:
+        conn.put_record(StreamName=stream_name,
+                        Data=k,
+                        PartitionKey=k)
+
+    # Get a shard iterator
+    response = conn.describe_stream(StreamName=stream_name)
+    shard_id = response['StreamDescription']['Shards'][0]['ShardId']
+    response = conn.get_shard_iterator(StreamName=stream_name,
+                                       ShardId=shard_id,
+                                       ShardIteratorType='AT_TIMESTAMP',
+                                       Timestamp=timestamp)
+    shard_iterator = response['ShardIterator']
+
+    response = conn.get_records(ShardIterator=shard_iterator)
+
+    response['Records'].should.have.length_of(len(keys))
+    partition_keys = [r['PartitionKey'] for r in response['Records']]
+    partition_keys.should.equal(keys)
+
+
+@mock_kinesis
+def test_get_records_at_very_old_timestamp():
+    conn = boto3.client('kinesis', region_name="us-west-2")
+    stream_name = "my_stream"
+    conn.create_stream(StreamName=stream_name, ShardCount=1)
+
+    # Create some data
+    keys = [str(i) for i in range(1, 5)]
+    for k in keys:
+        conn.put_record(StreamName=stream_name,
+                        Data=k,
+                        PartitionKey=k)
+
+    # Get a shard iterator
+    response = conn.describe_stream(StreamName=stream_name)
+    shard_id = response['StreamDescription']['Shards'][0]['ShardId']
+    response = conn.get_shard_iterator(StreamName=stream_name,
+                                       ShardId=shard_id,
+                                       ShardIteratorType='AT_TIMESTAMP',
+                                       Timestamp=1)
+    shard_iterator = response['ShardIterator']
+
+    response = conn.get_records(ShardIterator=shard_iterator)
+
+    response['Records'].should.have.length_of(len(keys))
+    partition_keys = [r['PartitionKey'] for r in response['Records']]
+    partition_keys.should.equal(keys)
+
+
+@mock_kinesis
+def test_get_records_at_very_new_timestamp():
+    conn = boto3.client('kinesis', region_name="us-west-2")
+    stream_name = "my_stream"
+    conn.create_stream(StreamName=stream_name, ShardCount=1)
+
+    # Create some data
+    keys = [str(i) for i in range(1, 5)]
+    for k in keys:
+        conn.put_record(StreamName=stream_name,
+                        Data=k,
+                        PartitionKey=k)
+
+    timestamp = datetime.datetime.utcnow() + datetime.timedelta(seconds=1)
+
+    # Get a shard iterator
+    response = conn.describe_stream(StreamName=stream_name)
+    shard_id = response['StreamDescription']['Shards'][0]['ShardId']
+    response = conn.get_shard_iterator(StreamName=stream_name,
+                                       ShardId=shard_id,
+                                       ShardIteratorType='AT_TIMESTAMP',
+                                       Timestamp=timestamp)
+    shard_iterator = response['ShardIterator']
+
+    response = conn.get_records(ShardIterator=shard_iterator)
+
+    response['Records'].should.have.length_of(0)
+
+
+@mock_kinesis
+def test_get_records_from_empty_stream_at_timestamp():
+    conn = boto3.client('kinesis', region_name="us-west-2")
+    stream_name = "my_stream"
+    conn.create_stream(StreamName=stream_name, ShardCount=1)
+
+    timestamp = datetime.datetime.utcnow()
+
+    # Get a shard iterator
+    response = conn.describe_stream(StreamName=stream_name)
+    shard_id = response['StreamDescription']['Shards'][0]['ShardId']
+    response = conn.get_shard_iterator(StreamName=stream_name,
+                                       ShardId=shard_id,
+                                       ShardIteratorType='AT_TIMESTAMP',
+                                       Timestamp=timestamp)
+    shard_iterator = response['ShardIterator']
+
+    response = conn.get_records(ShardIterator=shard_iterator)
+
+    response['Records'].should.have.length_of(0)
+
+
 @mock_kinesis_deprecated
 def test_invalid_shard_iterator_type():
     conn = boto.kinesis.connect_to_region("us-west-2")