diff --git a/moto/__init__.py b/moto/__init__.py
index 9d292a3e1..c38212b42 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -3,7 +3,7 @@ import logging
 # logging.getLogger('boto').setLevel(logging.CRITICAL)
 
 __title__ = 'moto'
-__version__ = '1.2.0',
+__version__ = '1.2.0'
 
 from .acm import mock_acm  # flake8: noqa
 from .apigateway import mock_apigateway, mock_apigateway_deprecated  # flake8: noqa
diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py
index e7ff98119..cc8696104 100644
--- a/moto/apigateway/models.py
+++ b/moto/apigateway/models.py
@@ -1,12 +1,11 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
-import datetime
 import requests
+import time
 
 from moto.packages.responses import responses
 from moto.core import BaseBackend, BaseModel
-from moto.core.utils import iso_8601_datetime_with_milliseconds
 from .utils import create_id
 from .exceptions import StageNotFoundException
@@ -20,8 +19,7 @@ class Deployment(BaseModel, dict):
         self['id'] = deployment_id
         self['stageName'] = name
         self['description'] = description
-        self['createdDate'] = iso_8601_datetime_with_milliseconds(
-            datetime.datetime.now())
+        self['createdDate'] = int(time.time())
 
 
 class IntegrationResponse(BaseModel, dict):
@@ -300,7 +298,7 @@ class RestAPI(BaseModel):
         self.region_name = region_name
         self.name = name
         self.description = description
-        self.create_date = datetime.datetime.utcnow()
+        self.create_date = int(time.time())
 
         self.deployments = {}
         self.stages = {}
@@ -313,7 +311,7 @@ class RestAPI(BaseModel):
             "id": self.id,
             "name": self.name,
             "description": self.description,
-            "createdDate": iso_8601_datetime_with_milliseconds(self.create_date),
+            "createdDate": int(time.time()),
         }
 
     def add_child(self, path, parent_id=None):
diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py
index 947691bcf..3c3d3ea66 100644
--- a/moto/awslambda/models.py
+++ b/moto/awslambda/models.py
@@ -104,7 +104,7 @@ class _DockerDataVolumeContext:
 
             # It doesn't exist so we need to create it
             self._vol_ref.volume = self._lambda_func.docker_client.volumes.create(self._lambda_func.code_sha_256)
-            container = self._lambda_func.docker_client.containers.run('alpine', 'sleep 100', volumes={self.name: '/tmp/data'}, detach=True)
+            container = self._lambda_func.docker_client.containers.run('alpine', 'sleep 100', volumes={self.name: {'bind': '/tmp/data', 'mode': 'rw'}}, detach=True)
             try:
                 tar_bytes = zip2tar(self._lambda_func.code_bytes)
                 container.put_archive('/tmp/data', tar_bytes)
@@ -309,7 +309,7 @@ class LambdaFunction(BaseModel):
         finally:
             if container:
                 try:
-                    exit_code = container.wait(timeout=300)
+                    exit_code = container.wait(timeout=300)['StatusCode']
                 except requests.exceptions.ReadTimeout:
                     exit_code = -1
                     container.stop()
diff --git a/moto/core/responses.py b/moto/core/responses.py
index d254d1f85..278a24dc4 100644
--- a/moto/core/responses.py
+++ b/moto/core/responses.py
@@ -345,6 +345,10 @@ class BaseResponse(_TemplateEnvironmentMixin):
             if is_tracked(name) or not name.startswith(param_prefix):
                 continue
 
+            if len(name) > len(param_prefix) and \
+                    not name[len(param_prefix):].startswith('.'):
+                continue
+
             match = self.param_list_regex.search(name[len(param_prefix):]) if len(name) > len(param_prefix) else None
             if match:
                 prefix = param_prefix + match.group(1)
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index f877d3772..bfc672ed7 100755
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -2943,7 +2943,7 @@ class SpotFleetRequest(TaggedEC2Resource):
             'Properties']['SpotFleetRequestConfigData']
         ec2_backend = ec2_backends[region_name]
 
-        spot_price = properties['SpotPrice']
+        spot_price = properties.get('SpotPrice')
         target_capacity = properties['TargetCapacity']
         iam_fleet_role = properties['IamFleetRole']
         allocation_strategy = properties['AllocationStrategy']
@@ -2977,7 +2977,8 @@ class SpotFleetRequest(TaggedEC2Resource):
                 launch_spec_index += 1
         else:  # lowestPrice
             cheapest_spec = sorted(
-                self.launch_specs, key=lambda spec: float(spec.spot_price))[0]
+                # FIXME: change `+inf` to the on demand price scaled to weighted capacity when it's not present
+                self.launch_specs, key=lambda spec: float(spec.spot_price or '+inf'))[0]
             weight_so_far = weight_to_add + (weight_to_add % cheapest_spec.weighted_capacity)
             weight_map[cheapest_spec] = int(
                 weight_so_far // cheapest_spec.weighted_capacity)
diff --git a/moto/ec2/responses/spot_fleets.py b/moto/ec2/responses/spot_fleets.py
index 81d1e0146..0366af9d6 100644
--- a/moto/ec2/responses/spot_fleets.py
+++ b/moto/ec2/responses/spot_fleets.py
@@ -40,7 +40,7 @@ class SpotFleets(BaseResponse):
     def request_spot_fleet(self):
         spot_config = self._get_dict_param("SpotFleetRequestConfig.")
-        spot_price = spot_config['spot_price']
+        spot_price = spot_config.get('spot_price')
         target_capacity = spot_config['target_capacity']
         iam_fleet_role = spot_config['iam_fleet_role']
         allocation_strategy = spot_config['allocation_strategy']
@@ -78,7 +78,9 @@ DESCRIBE_SPOT_FLEET_TEMPLATE = """
             <spotFleetRequestConfig>
-                <spotPrice>{{ request.spot_price }}</spotPrice>
+                {% if request.spot_price %}
+                <spotPrice>{{ request.spot_price }}</spotPrice>
+                {% endif %}
                 <targetCapacity>{{ request.target_capacity }}</targetCapacity>
diff --git a/moto/s3/utils.py b/moto/s3/utils.py
--- a/moto/s3/utils.py
+++ b/moto/s3/utils.py
@@ -31,3 +31,16 @@
+REGION_URL_REGEX = re.compile(r'^https?://(s3[-.](?P<region1>.+)\.amazonaws\.com/(.+)|'
+                              r'(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?')
+
+
+def parse_region_from_url(url):
+    match = REGION_URL_REGEX.search(url)
+    if match:
+        region = match.group('region1') or match.group('region2')
+    else:
+        region = 'us-east-1'
+    return region
+
+
 def metadata_from_headers(headers):
     metadata = {}
     meta_regex = re.compile(
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 1b151eb29..655be0616 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -8,7 +8,7 @@ freezegun
 flask
 boto>=2.45.0
 boto3>=1.4.4
-botocore>=1.5.77
+botocore>=1.8.36
 six>=1.9
 prompt-toolkit==1.0.14
 click==6.7
diff --git a/setup.cfg b/setup.cfg
index 3c6e79cf3..fb04c16a8 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,8 @@
+[nosetests]
+verbosity=1
+detailed-errors=1
+with-coverage=1
+cover-package=moto
+
 [bdist_wheel]
 universal=1
diff --git a/setup.py b/setup.py
index 27c635944..57140401b 100755
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@ install_requires = [
     "mock",
     "docker>=2.5.1",
     "jsondiff==1.1.1",
-    "aws-xray-sdk>=0.93",
+    "aws-xray-sdk<0.96,>=0.93",
 ]
 
 extras_require = {
diff --git a/tests/test_cloudformation/test_cloudformation_stack_integration.py b/tests/test_cloudformation/test_cloudformation_stack_integration.py
index 3a7525585..2c808726f 100644
--- a/tests/test_cloudformation/test_cloudformation_stack_integration.py
+++ b/tests/test_cloudformation/test_cloudformation_stack_integration.py
@@ -2156,6 +2156,78 @@ def test_stack_spot_fleet():
     launch_spec['WeightedCapacity'].should.equal(2.0)
 
 
+@mock_cloudformation()
+@mock_ec2()
+def test_stack_spot_fleet_should_figure_out_default_price():
+    conn = boto3.client('ec2', 'us-east-1')
+
+    vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc']
+    subnet = conn.create_subnet(
+        VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet']
+    subnet_id = subnet['SubnetId']
+
+    spot_fleet_template = {
+        'Resources': {
+            "SpotFleet1": {
+                "Type": "AWS::EC2::SpotFleet",
+                "Properties": {
+                    "SpotFleetRequestConfigData": {
+                        "IamFleetRole": "arn:aws:iam::123456789012:role/fleet",
"arn:aws:iam::123456789012:role/fleet", + "TargetCapacity": 6, + "AllocationStrategy": "diversified", + "LaunchSpecifications": [ + { + "EbsOptimized": "false", + "InstanceType": 't2.small', + "ImageId": "ami-1234", + "SubnetId": subnet_id, + "WeightedCapacity": "2", + }, + { + "EbsOptimized": "true", + "InstanceType": 't2.large', + "ImageId": "ami-1234", + "Monitoring": {"Enabled": "true"}, + "SecurityGroups": [{"GroupId": "sg-123"}], + "SubnetId": subnet_id, + "IamInstanceProfile": {"Arn": "arn:aws:iam::123456789012:role/fleet"}, + "WeightedCapacity": "4", + } + ] + } + } + } + } + } + spot_fleet_template_json = json.dumps(spot_fleet_template) + + cf_conn = boto3.client('cloudformation', 'us-east-1') + stack_id = cf_conn.create_stack( + StackName='test_stack', + TemplateBody=spot_fleet_template_json, + )['StackId'] + + stack_resources = cf_conn.list_stack_resources(StackName=stack_id) + stack_resources['StackResourceSummaries'].should.have.length_of(1) + spot_fleet_id = stack_resources[ + 'StackResourceSummaries'][0]['PhysicalResourceId'] + + spot_fleet_requests = conn.describe_spot_fleet_requests( + SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'] + len(spot_fleet_requests).should.equal(1) + spot_fleet_request = spot_fleet_requests[0] + spot_fleet_request['SpotFleetRequestState'].should.equal("active") + spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig'] + + assert 'SpotPrice' not in spot_fleet_config + len(spot_fleet_config['LaunchSpecifications']).should.equal(2) + launch_spec1 = spot_fleet_config['LaunchSpecifications'][0] + launch_spec2 = spot_fleet_config['LaunchSpecifications'][1] + + assert 'SpotPrice' not in launch_spec1 + assert 'SpotPrice' not in launch_spec2 + + @mock_ec2 @mock_elbv2 @mock_cloudformation diff --git a/tests/test_ec2/test_elastic_block_store.py b/tests/test_ec2/test_elastic_block_store.py index 9c07f38d6..fc0677cfe 100644 --- a/tests/test_ec2/test_elastic_block_store.py +++ b/tests/test_ec2/test_elastic_block_store.py @@ -5,10 +5,11 @@ from nose.tools import assert_raises from moto.ec2 import ec2_backends import boto +import boto3 from boto.exception import EC2ResponseError import sure # noqa -from moto import mock_ec2_deprecated +from moto import mock_ec2_deprecated, mock_ec2 @mock_ec2_deprecated @@ -579,3 +580,25 @@ def test_volume_tag_escaping(): snaps = [snap for snap in conn.get_all_snapshots() if snap.id == snapshot.id] dict(snaps[0].tags).should.equal({'key': ''}) + + +@mock_ec2 +def test_search_for_many_snapshots(): + ec2_client = boto3.client('ec2', region_name='eu-west-1') + + volume_response = ec2_client.create_volume( + AvailabilityZone='eu-west-1a', Size=10 + ) + + snapshot_ids = [] + for i in range(1, 20): + create_snapshot_response = ec2_client.create_snapshot( + VolumeId=volume_response['VolumeId'] + ) + snapshot_ids.append(create_snapshot_response['SnapshotId']) + + snapshots_response = ec2_client.describe_snapshots( + SnapshotIds=snapshot_ids + ) + + assert len(snapshots_response['Snapshots']) == len(snapshot_ids) diff --git a/tests/test_ec2/test_spot_fleet.py b/tests/test_ec2/test_spot_fleet.py index a8d33c299..a2bd1d061 100644 --- a/tests/test_ec2/test_spot_fleet.py +++ b/tests/test_ec2/test_spot_fleet.py @@ -316,3 +316,30 @@ def test_modify_spot_fleet_request_down_no_terminate_after_custom_terminate(): SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig'] spot_fleet_config['TargetCapacity'].should.equal(1) spot_fleet_config['FulfilledCapacity'].should.equal(2.0) + + 
+@mock_ec2
+def test_create_spot_fleet_without_spot_price():
+    conn = boto3.client("ec2", region_name='us-west-2')
+    subnet_id = get_subnet_id(conn)
+
+    # remove prices to force a fallback to ondemand price
+    spot_config_without_price = spot_config(subnet_id)
+    del spot_config_without_price['SpotPrice']
+    for spec in spot_config_without_price['LaunchSpecifications']:
+        del spec['SpotPrice']
+
+    spot_fleet_id = conn.request_spot_fleet(SpotFleetRequestConfig=spot_config_without_price)['SpotFleetRequestId']
+    spot_fleet_requests = conn.describe_spot_fleet_requests(
+        SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
+    len(spot_fleet_requests).should.equal(1)
+    spot_fleet_request = spot_fleet_requests[0]
+    spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig']
+
+    len(spot_fleet_config['LaunchSpecifications']).should.equal(2)
+    launch_spec1 = spot_fleet_config['LaunchSpecifications'][0]
+    launch_spec2 = spot_fleet_config['LaunchSpecifications'][1]
+
+    # AWS will figure out the price
+    assert 'SpotPrice' not in launch_spec1
+    assert 'SpotPrice' not in launch_spec2
diff --git a/tests/test_elbv2/test_elbv2.py b/tests/test_elbv2/test_elbv2.py
index 4fb527525..ce092976a 100644
--- a/tests/test_elbv2/test_elbv2.py
+++ b/tests/test_elbv2/test_elbv2.py
@@ -340,6 +340,10 @@ def test_create_target_group_and_listeners():
             'Type': 'forward'}])
     http_listener_arn = listener.get('ListenerArn')
 
+    response = conn.describe_target_groups(LoadBalancerArn=load_balancer_arn,
+                                           Names=['a-target'])
+    response.get('TargetGroups').should.have.length_of(1)
+
     # And another with SSL
     response = conn.create_listener(
         LoadBalancerArn=load_balancer_arn,
diff --git a/tests/test_redshift/test_redshift.py b/tests/test_redshift/test_redshift.py
index 8759506ec..5527be106 100644
--- a/tests/test_redshift/test_redshift.py
+++ b/tests/test_redshift/test_redshift.py
@@ -1081,3 +1081,98 @@ def test_tagged_resource_not_found_error():
         ResourceName='bad:arn'
     ).should.throw(ClientError, "Tagging is not supported for this type of resource")
+
+@mock_redshift
+def test_enable_snapshot_copy():
+    client = boto3.client('redshift', region_name='us-east-1')
+    client.create_cluster(
+        ClusterIdentifier='test',
+        ClusterType='single-node',
+        DBName='test',
+        Encrypted=True,
+        MasterUsername='user',
+        MasterUserPassword='password',
+        NodeType='ds2.xlarge',
+    )
+    client.enable_snapshot_copy(
+        ClusterIdentifier='test',
+        DestinationRegion='us-west-2',
+        RetentionPeriod=3,
+        SnapshotCopyGrantName='copy-us-east-1-to-us-west-2'
+    )
+    response = client.describe_clusters(ClusterIdentifier='test')
+    cluster_snapshot_copy_status = response['Clusters'][0]['ClusterSnapshotCopyStatus']
+    cluster_snapshot_copy_status['RetentionPeriod'].should.equal(3)
+    cluster_snapshot_copy_status['DestinationRegion'].should.equal('us-west-2')
+    cluster_snapshot_copy_status['SnapshotCopyGrantName'].should.equal('copy-us-east-1-to-us-west-2')
+
+
+@mock_redshift
+def test_enable_snapshot_copy_unencrypted():
+    client = boto3.client('redshift', region_name='us-east-1')
+    client.create_cluster(
+        ClusterIdentifier='test',
+        ClusterType='single-node',
+        DBName='test',
+        MasterUsername='user',
+        MasterUserPassword='password',
+        NodeType='ds2.xlarge',
+    )
+    client.enable_snapshot_copy(
+        ClusterIdentifier='test',
+        DestinationRegion='us-west-2',
+    )
+    response = client.describe_clusters(ClusterIdentifier='test')
+    cluster_snapshot_copy_status = response['Clusters'][0]['ClusterSnapshotCopyStatus']
+    cluster_snapshot_copy_status['RetentionPeriod'].should.equal(7)
+    cluster_snapshot_copy_status['DestinationRegion'].should.equal('us-west-2')
+
+
+@mock_redshift
+def test_disable_snapshot_copy():
+    client = boto3.client('redshift', region_name='us-east-1')
+    client.create_cluster(
+        DBName='test',
+        ClusterIdentifier='test',
+        ClusterType='single-node',
+        NodeType='ds2.xlarge',
+        MasterUsername='user',
+        MasterUserPassword='password',
+    )
+    client.enable_snapshot_copy(
+        ClusterIdentifier='test',
+        DestinationRegion='us-west-2',
+        RetentionPeriod=3,
+        SnapshotCopyGrantName='copy-us-east-1-to-us-west-2',
+    )
+    client.disable_snapshot_copy(
+        ClusterIdentifier='test',
+    )
+    response = client.describe_clusters(ClusterIdentifier='test')
+    response['Clusters'][0].shouldnt.contain('ClusterSnapshotCopyStatus')
+
+
+@mock_redshift
+def test_modify_snapshot_copy_retention_period():
+    client = boto3.client('redshift', region_name='us-east-1')
+    client.create_cluster(
+        DBName='test',
+        ClusterIdentifier='test',
+        ClusterType='single-node',
+        NodeType='ds2.xlarge',
+        MasterUsername='user',
+        MasterUserPassword='password',
+    )
+    client.enable_snapshot_copy(
+        ClusterIdentifier='test',
+        DestinationRegion='us-west-2',
+        RetentionPeriod=3,
+        SnapshotCopyGrantName='copy-us-east-1-to-us-west-2',
+    )
+    client.modify_snapshot_copy_retention_period(
+        ClusterIdentifier='test',
+        RetentionPeriod=5,
+    )
+    response = client.describe_clusters(ClusterIdentifier='test')
+    cluster_snapshot_copy_status = response['Clusters'][0]['ClusterSnapshotCopyStatus']
+    cluster_snapshot_copy_status['RetentionPeriod'].should.equal(5)
diff --git a/tests/test_s3/test_s3_utils.py b/tests/test_s3/test_s3_utils.py
index b4f56d89a..f1dfc04d1 100644
--- a/tests/test_s3/test_s3_utils.py
+++ b/tests/test_s3/test_s3_utils.py
@@ -1,6 +1,6 @@
 from __future__ import unicode_literals
 from sure import expect
-from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore
+from moto.s3.utils import bucket_name_from_url, _VersionedKeyStore, parse_region_from_url
 
 
 def test_base_url():
@@ -53,3 +53,21 @@ def test_versioned_key_store():
     d.setlist('key', [[1], [2]])
     d['key'].should.have.length_of(1)
     d.getlist('key').should.be.equal([[1], [2]])
+
+
+def test_parse_region_from_url():
+    expected = 'us-west-2'
+    for url in ['http://s3-us-west-2.amazonaws.com/bucket',
+                'http://s3.us-west-2.amazonaws.com/bucket',
+                'http://bucket.s3-us-west-2.amazonaws.com',
+                'https://s3-us-west-2.amazonaws.com/bucket',
+                'https://s3.us-west-2.amazonaws.com/bucket',
+                'https://bucket.s3-us-west-2.amazonaws.com']:
+        parse_region_from_url(url).should.equal(expected)
+
+    expected = 'us-east-1'
+    for url in ['http://s3.amazonaws.com/bucket',
+                'http://bucket.s3.amazonaws.com',
+                'https://s3.amazonaws.com/bucket',
+                'https://bucket.s3.amazonaws.com']:
+        parse_region_from_url(url).should.equal(expected)