From 8cca33dc42f0217a00068d6727757953db8e1a3a Mon Sep 17 00:00:00 2001 From: Iiro Sulopuisto Date: Tue, 30 Jul 2019 14:14:14 +0300 Subject: [PATCH 01/42] Test redshift cluster creation time more thoroughly --- tests/test_redshift/test_redshift.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_redshift/test_redshift.py b/tests/test_redshift/test_redshift.py index 541614788..2c9b42a1d 100644 --- a/tests/test_redshift/test_redshift.py +++ b/tests/test_redshift/test_redshift.py @@ -36,6 +36,7 @@ def test_create_cluster_boto3(): response['Cluster']['NodeType'].should.equal('ds2.xlarge') create_time = response['Cluster']['ClusterCreateTime'] create_time.should.be.lower_than(datetime.datetime.now(create_time.tzinfo)) + create_time.should.be.greater_than(datetime.datetime.now(create_time.tzinfo) - datetime.timedelta(minutes=1)) @mock_redshift From 24bd99b5c4d161b1af22b86d48bc4cc141e7e44a Mon Sep 17 00:00:00 2001 From: Iiro Sulopuisto Date: Tue, 30 Jul 2019 14:16:12 +0300 Subject: [PATCH 02/42] Make cluster creation time UTC --- moto/redshift/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/redshift/models.py b/moto/redshift/models.py index 64e5c5e35..c0b783bde 100644 --- a/moto/redshift/models.py +++ b/moto/redshift/models.py @@ -78,7 +78,7 @@ class Cluster(TaggableResourceMixin, BaseModel): super(Cluster, self).__init__(region_name, tags) self.redshift_backend = redshift_backend self.cluster_identifier = cluster_identifier - self.create_time = iso_8601_datetime_with_milliseconds(datetime.datetime.now()) + self.create_time = iso_8601_datetime_with_milliseconds(datetime.datetime.utcnow()) self.status = 'available' self.node_type = node_type self.master_username = master_username From 7fa5ce3dc308c5b1bd8581a39109e14d1977beee Mon Sep 17 00:00:00 2001 From: Chiharu Terashima Date: Sat, 3 Aug 2019 01:28:47 +0900 Subject: [PATCH 03/42] implements APIGateway update_api_key --- .gitignore | 1 + IMPLEMENTATION_COVERAGE.md | 2 +- moto/apigateway/models.py | 23 +++++++++++++++++++++++ moto/apigateway/responses.py | 3 +++ tests/test_apigateway/test_apigateway.py | 21 +++++++++++++++++++-- 5 files changed, 47 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 0a24fe476..0282e3caf 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ python_env .ropeproject/ .pytest_cache/ venv/ +env/ .python-version .vscode/ tests/file.tmp diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 1d9811983..897c3885c 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -181,7 +181,7 @@ - [ ] test_invoke_method - [ ] untag_resource - [ ] update_account -- [ ] update_api_key +- [X] update_api_key - [ ] update_authorizer - [ ] update_base_path_mapping - [ ] update_client_certificate diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py index 41a49e361..6be062d7f 100644 --- a/moto/apigateway/models.py +++ b/moto/apigateway/models.py @@ -309,6 +309,25 @@ class ApiKey(BaseModel, dict): self['createdDate'] = self['lastUpdatedDate'] = int(time.time()) self['stageKeys'] = stageKeys + def update_operations(self, patch_operations): + for op in patch_operations: + if op['op'] == 'replace': + if '/name' in op['path']: + self['name'] = op['value'] + elif '/customerId' in op['path']: + self['customerId'] = op['value'] + elif '/description' in op['path']: + self['description'] = op['value'] + elif '/enabled' in op['path']: + self['enabled'] = self._str2bool(op['value']) + else: + raise Exception( + 'Patch operation 
"%s" not implemented' % op['op']) + return self + + def _str2bool(self, v): + return v.lower() == "true" + class UsagePlan(BaseModel, dict): @@ -599,6 +618,10 @@ class APIGatewayBackend(BaseBackend): def get_apikey(self, api_key_id): return self.keys[api_key_id] + def update_apikey(self, api_key_id, patch_operations): + key = self.keys[api_key_id] + return key.update_operations(patch_operations) + def delete_apikey(self, api_key_id): self.keys.pop(api_key_id) return {} diff --git a/moto/apigateway/responses.py b/moto/apigateway/responses.py index bc4d262cd..fa82705b1 100644 --- a/moto/apigateway/responses.py +++ b/moto/apigateway/responses.py @@ -245,6 +245,9 @@ class APIGatewayResponse(BaseResponse): if self.method == 'GET': apikey_response = self.backend.get_apikey(apikey) + elif self.method == 'PATCH': + patch_operations = self._get_param('patchOperations') + apikey_response = self.backend.update_apikey(apikey, patch_operations) elif self.method == 'DELETE': apikey_response = self.backend.delete_apikey(apikey) return 200, {}, json.dumps(apikey_response) diff --git a/tests/test_apigateway/test_apigateway.py b/tests/test_apigateway/test_apigateway.py index 5954de8ca..0a33f2f9f 100644 --- a/tests/test_apigateway/test_apigateway.py +++ b/tests/test_apigateway/test_apigateway.py @@ -988,13 +988,30 @@ def test_api_keys(): apikey['name'].should.equal(apikey_name) len(apikey['value']).should.equal(40) + apikey_name = 'TESTKEY3' + payload = {'name': apikey_name } + response = client.create_api_key(**payload) + apikey_id = response['id'] + + patch_operations = [ + {'op': 'replace', 'path': '/name', 'value': 'TESTKEY3_CHANGE'}, + {'op': 'replace', 'path': '/customerId', 'value': '12345'}, + {'op': 'replace', 'path': '/description', 'value': 'APIKEY UPDATE TEST'}, + {'op': 'replace', 'path': '/enabled', 'value': 'false'}, + ] + response = client.update_api_key(apiKey=apikey_id, patchOperations=patch_operations) + response['name'].should.equal('TESTKEY3_CHANGE') + response['customerId'].should.equal('12345') + response['description'].should.equal('APIKEY UPDATE TEST') + response['enabled'].should.equal(False) + response = client.get_api_keys() - len(response['items']).should.equal(2) + len(response['items']).should.equal(3) client.delete_api_key(apiKey=apikey_id) response = client.get_api_keys() - len(response['items']).should.equal(1) + len(response['items']).should.equal(2) @mock_apigateway def test_usage_plans(): From 3c19f0a02dfc440b989047fee751bb65eabb527f Mon Sep 17 00:00:00 2001 From: wndhydrnt Date: Sat, 3 Aug 2019 13:56:07 +0200 Subject: [PATCH 04/42] Convert fields createdAt and updatedAt of deployment to timestamp --- moto/ecs/models.py | 5 +++-- tests/test_ecs/test_ecs_boto3.py | 3 +++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/moto/ecs/models.py b/moto/ecs/models.py index 92759651d..863cfc49e 100644 --- a/moto/ecs/models.py +++ b/moto/ecs/models.py @@ -8,6 +8,7 @@ import boto3 import pytz from moto.core.exceptions import JsonRESTError from moto.core import BaseBackend, BaseModel +from moto.core.utils import unix_time from moto.ec2 import ec2_backends from copy import copy @@ -231,9 +232,9 @@ class Service(BaseObject): for deployment in response_object['deployments']: if isinstance(deployment['createdAt'], datetime): - deployment['createdAt'] = deployment['createdAt'].isoformat() + deployment['createdAt'] = unix_time(deployment['createdAt'].replace(tzinfo=None)) if isinstance(deployment['updatedAt'], datetime): - deployment['updatedAt'] = 
deployment['updatedAt'].isoformat() + deployment['updatedAt'] = unix_time(deployment['updatedAt'].replace(tzinfo=None)) return response_object diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py index 27f37308e..9937af26b 100644 --- a/tests/test_ecs/test_ecs_boto3.py +++ b/tests/test_ecs/test_ecs_boto3.py @@ -1,4 +1,5 @@ from __future__ import unicode_literals +from datetime import datetime from copy import deepcopy @@ -477,6 +478,8 @@ def test_describe_services(): response['services'][0]['deployments'][0]['pendingCount'].should.equal(2) response['services'][0]['deployments'][0]['runningCount'].should.equal(0) response['services'][0]['deployments'][0]['status'].should.equal('PRIMARY') + (datetime.now() - response['services'][0]['deployments'][0]["createdAt"].replace(tzinfo=None)).seconds.should.be.within(0, 10) + (datetime.now() - response['services'][0]['deployments'][0]["updatedAt"].replace(tzinfo=None)).seconds.should.be.within(0, 10) @mock_ecs From b7884ef9034d79a496e832c4564cd6de6a5d4f75 Mon Sep 17 00:00:00 2001 From: Berislav Kovacki Date: Mon, 5 Aug 2019 17:34:39 +0200 Subject: [PATCH 05/42] Add S3 support for INTELLIGENT_TIERING, GLACIER and DEEP_ARCHIVE storage * Add INTELLIGENT_TIERING, GLACIER and DEEP_ARCHIVE as valid storage classes for objects * Add ObjectNotInActiveTierError error on PUT object copy for GLACIER and DEEP_ARCHIVE storage class objects --- moto/s3/exceptions.py | 11 +++++++ moto/s3/models.py | 3 +- moto/s3/responses.py | 8 +++-- tests/test_s3/test_s3_storageclass.py | 42 +++++++++++++++++++++++---- 4 files changed, 56 insertions(+), 8 deletions(-) diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py index f74fc21ae..8d2326fa1 100644 --- a/moto/s3/exceptions.py +++ b/moto/s3/exceptions.py @@ -60,6 +60,17 @@ class MissingKey(S3ClientError): ) +class ObjectNotInActiveTierError(S3ClientError): + code = 403 + + def __init__(self, key_name): + super(ObjectNotInActiveTierError, self).__init__( + "ObjectNotInActiveTierError", + "The source object of the COPY operation is not in the active tier and is only stored in Amazon Glacier.", + Key=key_name, + ) + + class InvalidPartOrder(S3ClientError): code = 400 diff --git a/moto/s3/models.py b/moto/s3/models.py index 528eacee3..b5aef34d3 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -28,7 +28,8 @@ MAX_BUCKET_NAME_LENGTH = 63 MIN_BUCKET_NAME_LENGTH = 3 UPLOAD_ID_BYTES = 43 UPLOAD_PART_MIN_SIZE = 5242880 -STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"] +STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA", + "INTELLIGENT_TIERING", "GLACIER", "DEEP_ARCHIVE"] DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding() diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 2bd6ed1a3..a05a86de4 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -17,7 +17,7 @@ from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_n parse_key_name as bucketpath_parse_key_name, is_delete_keys as bucketpath_is_delete_keys from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, MissingKey, InvalidPartOrder, MalformedXML, \ - MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent + MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent, ObjectNotInActiveTierError from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \ FakeTag from .utils import bucket_name_from_url, 
clean_key_name, metadata_from_headers, parse_region_from_url @@ -902,7 +902,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): src_version_id = parse_qs(src_key_parsed.query).get( 'versionId', [None])[0] - if self.backend.get_key(src_bucket, src_key, version_id=src_version_id): + key = self.backend.get_key(src_bucket, src_key, version_id=src_version_id) + + if key is not None: + if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]: + raise ObjectNotInActiveTierError(key) self.backend.copy_key(src_bucket, src_key, bucket_name, key_name, storage=storage_class, acl=acl, src_version_id=src_version_id) else: diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py index 99908c501..c72b773a9 100644 --- a/tests/test_s3/test_s3_storageclass.py +++ b/tests/test_s3/test_s3_storageclass.py @@ -1,16 +1,12 @@ from __future__ import unicode_literals -import boto import boto3 -from boto.exception import S3CreateError, S3ResponseError -from boto.s3.lifecycle import Lifecycle, Transition, Expiration, Rule import sure # noqa from botocore.exceptions import ClientError -from datetime import datetime from nose.tools import assert_raises -from moto import mock_s3_deprecated, mock_s3 +from moto import mock_s3 @mock_s3 @@ -41,6 +37,18 @@ def test_s3_storage_class_infrequent_access(): D['Contents'][0]["StorageClass"].should.equal("STANDARD_IA") +@mock_s3 +def test_s3_storage_class_intelligent_tiering(): + s3 = boto3.client("s3") + + s3.create_bucket(Bucket="Bucket") + s3.put_object(Bucket="Bucket", Key="my_key_infrequent", Body="my_value_infrequent", StorageClass="INTELLIGENT_TIERING") + + objects = s3.list_objects(Bucket="Bucket") + + objects['Contents'][0]["StorageClass"].should.equal("INTELLIGENT_TIERING") + + @mock_s3 def test_s3_storage_class_copy(): s3 = boto3.client("s3") @@ -90,6 +98,7 @@ def test_s3_invalid_storage_class(): e.response["Error"]["Code"].should.equal("InvalidStorageClass") e.response["Error"]["Message"].should.equal("The storage class you specified is not valid") + @mock_s3 def test_s3_default_storage_class(): s3 = boto3.client("s3") @@ -103,4 +112,27 @@ def test_s3_default_storage_class(): list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD") +@mock_s3 +def test_s3_copy_object_error_for_glacier_storage_class(): + s3 = boto3.client("s3") + s3.create_bucket(Bucket="Bucket") + s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER") + + with assert_raises(ClientError) as exc: + s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object") + + exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError") + + +@mock_s3 +def test_s3_copy_object_error_for_deep_archive_storage_class(): + s3 = boto3.client("s3") + s3.create_bucket(Bucket="Bucket") + + s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE") + + with assert_raises(ClientError) as exc: + s3.copy_object(CopySource={"Bucket": "Bucket", "Key": "First_Object"}, Bucket="Bucket", Key="Second_Object") + + exc.exception.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError") From a3794f2701eb54393b44d13da11680ec5314b8b8 Mon Sep 17 00:00:00 2001 From: aksagrimada <1955605+aksagrimada@users.noreply.github.com> Date: Tue, 6 Aug 2019 07:16:00 +0100 Subject: [PATCH 06/42] Resolve invalid escape sequence When run not as a decorator dynamodb2 displays an invalid escape sequence error /moto/dynamodb2/responses.py:603: 
DeprecationWarning: invalid escape sequence \s '\s*([=\+-])\s*', '\\1', update_expression) --- moto/dynamodb2/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index d34b176a7..86ca9a362 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -600,7 +600,7 @@ class DynamoHandler(BaseResponse): # E.g. `a = b + c` -> `a=b+c` if update_expression: update_expression = re.sub( - '\s*([=\+-])\s*', '\\1', update_expression) + r'\s*([=\+-])\s*', '\\1', update_expression) try: item = self.dynamodb_backend.update_item( From a35a55ec261c2ce8dcbd4d14c77426c5aec06c2f Mon Sep 17 00:00:00 2001 From: Berislav Kovacki Date: Tue, 6 Aug 2019 22:13:52 +0200 Subject: [PATCH 07/42] Add option to call batch submit_job with job definition name only * Add option to call batch submit_job with job definition name only * Fix bug which causes register_job_definition not to increment job revision number after a second revision --- moto/batch/models.py | 26 ++++++----- tests/test_batch/test_batch.py | 81 ++++++++++++++++++++++++++++++++++ 2 files changed, 96 insertions(+), 11 deletions(-) diff --git a/moto/batch/models.py b/moto/batch/models.py index c47ca6e97..caa442802 100644 --- a/moto/batch/models.py +++ b/moto/batch/models.py @@ -514,10 +514,13 @@ class BatchBackend(BaseBackend): return self._job_definitions.get(arn) def get_job_definition_by_name(self, name): - for comp_env in self._job_definitions.values(): - if comp_env.name == name: - return comp_env - return None + latest_revision = -1 + latest_job = None + for job_def in self._job_definitions.values(): + if job_def.name == name and job_def.revision > latest_revision: + latest_job = job_def + latest_revision = job_def.revision + return latest_job def get_job_definition_by_name_revision(self, name, revision): for job_def in self._job_definitions.values(): @@ -534,10 +537,13 @@ class BatchBackend(BaseBackend): :return: Job definition or None :rtype: JobDefinition or None """ - env = self.get_job_definition_by_arn(identifier) - if env is None: - env = self.get_job_definition_by_name(identifier) - return env + job_def = self.get_job_definition_by_arn(identifier) + if job_def is None: + if ':' in identifier: + job_def = self.get_job_definition_by_name_revision(*identifier.split(':', 1)) + else: + job_def = self.get_job_definition_by_name(identifier) + return job_def def get_job_definitions(self, identifier): """ @@ -984,9 +990,7 @@ class BatchBackend(BaseBackend): # TODO parameters, retries (which is a dict raw from request), job dependancies and container overrides are ignored for now # Look for job definition - job_def = self.get_job_definition_by_arn(job_def_id) - if job_def is None and ':' in job_def_id: - job_def = self.get_job_definition_by_name_revision(*job_def_id.split(':', 1)) + job_def = self.get_job_definition(job_def_id) if job_def is None: raise ClientException('Job definition {0} does not exist'.format(job_def_id)) diff --git a/tests/test_batch/test_batch.py b/tests/test_batch/test_batch.py index 310ac0b48..89a8d4d0e 100644 --- a/tests/test_batch/test_batch.py +++ b/tests/test_batch/test_batch.py @@ -642,6 +642,87 @@ def test_describe_task_definition(): len(resp['jobDefinitions']).should.equal(3) +@mock_logs +@mock_ec2 +@mock_ecs +@mock_iam +@mock_batch +def test_submit_job_by_name(): + ec2_client, iam_client, ecs_client, logs_client, batch_client = _get_clients() + vpc_id, subnet_id, sg_id, iam_arn = _setup(ec2_client, iam_client) + + 
compute_name = 'test_compute_env' + resp = batch_client.create_compute_environment( + computeEnvironmentName=compute_name, + type='UNMANAGED', + state='ENABLED', + serviceRole=iam_arn + ) + arn = resp['computeEnvironmentArn'] + + resp = batch_client.create_job_queue( + jobQueueName='test_job_queue', + state='ENABLED', + priority=123, + computeEnvironmentOrder=[ + { + 'order': 123, + 'computeEnvironment': arn + }, + ] + ) + queue_arn = resp['jobQueueArn'] + + job_definition_name = 'sleep10' + + batch_client.register_job_definition( + jobDefinitionName=job_definition_name, + type='container', + containerProperties={ + 'image': 'busybox', + 'vcpus': 1, + 'memory': 128, + 'command': ['sleep', '10'] + } + ) + batch_client.register_job_definition( + jobDefinitionName=job_definition_name, + type='container', + containerProperties={ + 'image': 'busybox', + 'vcpus': 1, + 'memory': 256, + 'command': ['sleep', '10'] + } + ) + resp = batch_client.register_job_definition( + jobDefinitionName=job_definition_name, + type='container', + containerProperties={ + 'image': 'busybox', + 'vcpus': 1, + 'memory': 512, + 'command': ['sleep', '10'] + } + ) + job_definition_arn = resp['jobDefinitionArn'] + + resp = batch_client.submit_job( + jobName='test1', + jobQueue=queue_arn, + jobDefinition=job_definition_name + ) + job_id = resp['jobId'] + + resp_jobs = batch_client.describe_jobs(jobs=[job_id]) + + # batch_client.terminate_job(jobId=job_id) + + len(resp_jobs['jobs']).should.equal(1) + resp_jobs['jobs'][0]['jobId'].should.equal(job_id) + resp_jobs['jobs'][0]['jobQueue'].should.equal(queue_arn) + resp_jobs['jobs'][0]['jobDefinition'].should.equal(job_definition_arn) + # SLOW TESTS @expected_failure @mock_logs From 7d453fec9ad57ae39ecad60cbba5f8d471e73925 Mon Sep 17 00:00:00 2001 From: Ashley Gould Date: Tue, 6 Aug 2019 15:44:49 -0700 Subject: [PATCH 08/42] [Resolves #2355] - create_organization(): add master account, default policy Model: OrganizationsBackend Method: create_organization create_organization now creates master account, root ou, and a default service control policy objects and adds them to the OrganizationsBackend object. the policy is attached to both the master account and the root ou. any subsiquently created accounts or OU also have the default policy attached. 
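
For reference, a minimal usage sketch of the new behaviour (boto3 against
the mocked backend; it mirrors the assertions added to
test_create_organization below, and the function name is only
illustrative):

    import boto3
    from moto import mock_organizations

    @mock_organizations
    def check_default_policy():
        client = boto3.client('organizations', region_name='us-east-1')
        client.create_organization(FeatureSet='ALL')

        # the master account now exists without an explicit create_account call
        accounts = client.list_accounts()['Accounts']
        assert accounts[0]['Name'] == 'master'

        # the FullAWSAccess service control policy is created and attached
        # to both the root OU and the master account
        policies = client.list_policies(Filter='SERVICE_CONTROL_POLICY')['Policies']
        assert policies[0]['Name'] == 'FullAWSAccess'
        targets = client.list_targets_for_policy(PolicyId=policies[0]['Id'])['Targets']
        assert len(targets) == 2
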
--- moto/organizations/models.py | 40 +++++++++++++++++-- moto/organizations/utils.py | 5 ++- .../organizations_test_utils.py | 1 - .../test_organizations_boto3.py | 31 +++++++++++--- 4 files changed, 64 insertions(+), 13 deletions(-) diff --git a/moto/organizations/models.py b/moto/organizations/models.py index 91004b9ba..561c6c3a8 100644 --- a/moto/organizations/models.py +++ b/moto/organizations/models.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import datetime import re +import json from moto.core import BaseBackend, BaseModel from moto.core.exceptions import RESTError @@ -151,7 +152,6 @@ class FakeRoot(FakeOrganizationalUnit): class FakeServiceControlPolicy(BaseModel): def __init__(self, organization, **kwargs): - self.type = 'POLICY' self.content = kwargs.get('Content') self.description = kwargs.get('Description') self.name = kwargs.get('Name') @@ -197,7 +197,38 @@ class OrganizationsBackend(BaseBackend): def create_organization(self, **kwargs): self.org = FakeOrganization(kwargs['FeatureSet']) - self.ou.append(FakeRoot(self.org)) + root_ou = FakeRoot(self.org) + self.ou.append(root_ou) + master_account = FakeAccount( + self.org, + AccountName='master', + Email=self.org.master_account_email, + ) + master_account.id = self.org.master_account_id + self.accounts.append(master_account) + default_policy = FakeServiceControlPolicy( + self.org, + Name='FullAWSAccess', + Description='Allows access to every operation', + Type='SERVICE_CONTROL_POLICY', + Content=json.dumps( + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": "*", + "Resource": "*" + } + ] + } + ) + ) + default_policy.id = utils.DEFAULT_POLICY_ID + default_policy.aws_managed = True + self.policies.append(default_policy) + self.attach_policy(PolicyId=default_policy.id, TargetId=root_ou.id) + self.attach_policy(PolicyId=default_policy.id, TargetId=master_account.id) return self.org.describe() def describe_organization(self): @@ -216,6 +247,7 @@ class OrganizationsBackend(BaseBackend): def create_organizational_unit(self, **kwargs): new_ou = FakeOrganizationalUnit(self.org, **kwargs) self.ou.append(new_ou) + self.attach_policy(PolicyId=utils.DEFAULT_POLICY_ID, TargetId=new_ou.id) return new_ou.describe() def get_organizational_unit_by_id(self, ou_id): @@ -258,6 +290,7 @@ class OrganizationsBackend(BaseBackend): def create_account(self, **kwargs): new_account = FakeAccount(self.org, **kwargs) self.accounts.append(new_account) + self.attach_policy(PolicyId=utils.DEFAULT_POLICY_ID, TargetId=new_account.id) return new_account.create_account_status def get_account_by_id(self, account_id): @@ -358,8 +391,7 @@ class OrganizationsBackend(BaseBackend): def attach_policy(self, **kwargs): policy = next((p for p in self.policies if p.id == kwargs['PolicyId']), None) - if (re.compile(utils.ROOT_ID_REGEX).match(kwargs['TargetId']) or - re.compile(utils.OU_ID_REGEX).match(kwargs['TargetId'])): + if (re.compile(utils.ROOT_ID_REGEX).match(kwargs['TargetId']) or re.compile(utils.OU_ID_REGEX).match(kwargs['TargetId'])): ou = next((ou for ou in self.ou if ou.id == kwargs['TargetId']), None) if ou is not None: if ou not in ou.attached_policies: diff --git a/moto/organizations/utils.py b/moto/organizations/utils.py index bde3660d2..5cbe59ada 100644 --- a/moto/organizations/utils.py +++ b/moto/organizations/utils.py @@ -4,7 +4,8 @@ import random import string MASTER_ACCOUNT_ID = '123456789012' -MASTER_ACCOUNT_EMAIL = 'fakeorg@moto-example.com' +MASTER_ACCOUNT_EMAIL = 'master@example.com' +DEFAULT_POLICY_ID 
= 'p-FullAWSAccess' ORGANIZATION_ARN_FORMAT = 'arn:aws:organizations::{0}:organization/{1}' MASTER_ACCOUNT_ARN_FORMAT = 'arn:aws:organizations::{0}:account/{1}/{0}' ACCOUNT_ARN_FORMAT = 'arn:aws:organizations::{0}:account/{1}/{2}' @@ -26,7 +27,7 @@ ROOT_ID_REGEX = r'r-[a-z0-9]{%s}' % ROOT_ID_SIZE OU_ID_REGEX = r'ou-[a-z0-9]{%s}-[a-z0-9]{%s}' % (ROOT_ID_SIZE, OU_ID_SUFFIX_SIZE) ACCOUNT_ID_REGEX = r'[0-9]{%s}' % ACCOUNT_ID_SIZE CREATE_ACCOUNT_STATUS_ID_REGEX = r'car-[a-z0-9]{%s}' % CREATE_ACCOUNT_STATUS_ID_SIZE -SCP_ID_REGEX = r'p-[a-z0-9]{%s}' % SCP_ID_SIZE +SCP_ID_REGEX = r'%s|p-[a-z0-9]{%s}' % (DEFAULT_POLICY_ID, SCP_ID_SIZE) def make_random_org_id(): diff --git a/tests/test_organizations/organizations_test_utils.py b/tests/test_organizations/organizations_test_utils.py index 36933d41a..83b60b877 100644 --- a/tests/test_organizations/organizations_test_utils.py +++ b/tests/test_organizations/organizations_test_utils.py @@ -1,7 +1,6 @@ from __future__ import unicode_literals import six -import sure # noqa import datetime from moto.organizations import utils diff --git a/tests/test_organizations/test_organizations_boto3.py b/tests/test_organizations/test_organizations_boto3.py index 05f831e62..28f8cca91 100644 --- a/tests/test_organizations/test_organizations_boto3.py +++ b/tests/test_organizations/test_organizations_boto3.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals import boto3 import json import six -import sure # noqa from botocore.exceptions import ClientError from nose.tools import assert_raises @@ -27,6 +26,25 @@ def test_create_organization(): validate_organization(response) response['Organization']['FeatureSet'].should.equal('ALL') + response = client.list_accounts() + len(response['Accounts']).should.equal(1) + response['Accounts'][0]['Name'].should.equal('master') + response['Accounts'][0]['Id'].should.equal(utils.MASTER_ACCOUNT_ID) + response['Accounts'][0]['Email'].should.equal(utils.MASTER_ACCOUNT_EMAIL) + + response = client.list_policies(Filter='SERVICE_CONTROL_POLICY') + len(response['Policies']).should.equal(1) + response['Policies'][0]['Name'].should.equal('FullAWSAccess') + response['Policies'][0]['Id'].should.equal(utils.DEFAULT_POLICY_ID) + response['Policies'][0]['AwsManaged'].should.equal(True) + + response = client.list_targets_for_policy(PolicyId=utils.DEFAULT_POLICY_ID) + len(response['Targets']).should.equal(2) + root_ou = [t for t in response['Targets'] if t['Type'] == 'ROOT'][0] + root_ou['Name'].should.equal('Root') + master_account = [t for t in response['Targets'] if t['Type'] == 'ACCOUNT'][0] + master_account['Name'].should.equal('master') + @mock_organizations def test_describe_organization(): @@ -177,11 +195,11 @@ def test_list_accounts(): response = client.list_accounts() response.should.have.key('Accounts') accounts = response['Accounts'] - len(accounts).should.equal(5) + len(accounts).should.equal(6) for account in accounts: validate_account(org, account) - accounts[3]['Name'].should.equal(mockname + '3') - accounts[2]['Email'].should.equal(mockname + '2' + '@' + mockdomain) + accounts[4]['Name'].should.equal(mockname + '3') + accounts[3]['Email'].should.equal(mockname + '2' + '@' + mockdomain) @mock_organizations @@ -291,8 +309,10 @@ def test_list_children(): response02 = client.list_children(ParentId=root_id, ChildType='ORGANIZATIONAL_UNIT') response03 = client.list_children(ParentId=ou01_id, ChildType='ACCOUNT') response04 = client.list_children(ParentId=ou01_id, ChildType='ORGANIZATIONAL_UNIT') - 
response01['Children'][0]['Id'].should.equal(account01_id) + response01['Children'][0]['Id'].should.equal(utils.MASTER_ACCOUNT_ID) response01['Children'][0]['Type'].should.equal('ACCOUNT') + response01['Children'][1]['Id'].should.equal(account01_id) + response01['Children'][1]['Type'].should.equal('ACCOUNT') response02['Children'][0]['Id'].should.equal(ou01_id) response02['Children'][0]['Type'].should.equal('ORGANIZATIONAL_UNIT') response03['Children'][0]['Id'].should.equal(account02_id) @@ -591,4 +611,3 @@ def test_list_targets_for_policy_exception(): ex.operation_name.should.equal('ListTargetsForPolicy') ex.response['Error']['Code'].should.equal('400') ex.response['Error']['Message'].should.contain('InvalidInputException') - From 5063ffc837aa755491d15671db16d5ba68fce902 Mon Sep 17 00:00:00 2001 From: Berislav Kovacki Date: Wed, 7 Aug 2019 17:37:53 +0200 Subject: [PATCH 09/42] Implement pagination support for GetLogEvents * Add nextForwardToken and nextBackwardToken to GetLogEvents response * Handle end of stream by returning the same token as passed in --- moto/logs/models.py | 28 ++++++++++++----- tests/test_logs/test_logs.py | 60 ++++++++++++++++++++++++++++++++++++ 2 files changed, 80 insertions(+), 8 deletions(-) diff --git a/moto/logs/models.py b/moto/logs/models.py index a44b76812..2b8dcfeb4 100644 --- a/moto/logs/models.py +++ b/moto/logs/models.py @@ -98,17 +98,29 @@ class LogStream: return True + def get_paging_token_from_index(index, back=False): + if index is not None: + return "b/{:056d}".format(index) if back else "f/{:056d}".format(index) + return 0 + + def get_index_from_paging_token(token): + if token is not None: + return int(token[2:]) + return 0 + events = sorted(filter(filter_func, self.events), key=lambda event: event.timestamp, reverse=start_from_head) - back_token = next_token - if next_token is None: - next_token = 0 + next_index = get_index_from_paging_token(next_token) + back_index = next_index - events_page = [event.to_response_dict() for event in events[next_token: next_token + limit]] - next_token += limit - if next_token >= len(self.events): - next_token = None + events_page = [event.to_response_dict() for event in events[next_index: next_index + limit]] + if next_index + limit < len(self.events): + next_index += limit - return events_page, back_token, next_token + back_index -= limit + if back_index <= 0: + back_index = 0 + + return events_page, get_paging_token_from_index(back_index, True), get_paging_token_from_index(next_index) def filter_log_events(self, log_group_name, log_stream_names, start_time, end_time, limit, next_token, filter_pattern, interleaved): def filter_func(event): diff --git a/tests/test_logs/test_logs.py b/tests/test_logs/test_logs.py index 7048061f0..49e593fdc 100644 --- a/tests/test_logs/test_logs.py +++ b/tests/test_logs/test_logs.py @@ -162,3 +162,63 @@ def test_delete_retention_policy(): response = conn.delete_log_group(logGroupName=log_group_name) + +@mock_logs +def test_get_log_events(): + conn = boto3.client('logs', 'us-west-2') + log_group_name = 'test' + log_stream_name = 'stream' + conn.create_log_group(logGroupName=log_group_name) + conn.create_log_stream( + logGroupName=log_group_name, + logStreamName=log_stream_name + ) + + events = [{'timestamp': x, 'message': str(x)} for x in range(20)] + + conn.put_log_events( + logGroupName=log_group_name, + logStreamName=log_stream_name, + logEvents=events + ) + + resp = conn.get_log_events( + logGroupName=log_group_name, + logStreamName=log_stream_name, + limit=10) + + 
resp['events'].should.have.length_of(10) + resp.should.have.key('nextForwardToken') + resp.should.have.key('nextBackwardToken') + for i in range(10): + resp['events'][i]['timestamp'].should.equal(i) + resp['events'][i]['message'].should.equal(str(i)) + + next_token = resp['nextForwardToken'] + + resp = conn.get_log_events( + logGroupName=log_group_name, + logStreamName=log_stream_name, + nextToken=next_token, + limit=10) + + resp['events'].should.have.length_of(10) + resp.should.have.key('nextForwardToken') + resp.should.have.key('nextBackwardToken') + resp['nextForwardToken'].should.equal(next_token) + for i in range(10): + resp['events'][i]['timestamp'].should.equal(i+10) + resp['events'][i]['message'].should.equal(str(i+10)) + + resp = conn.get_log_events( + logGroupName=log_group_name, + logStreamName=log_stream_name, + nextToken=resp['nextBackwardToken'], + limit=10) + + resp['events'].should.have.length_of(10) + resp.should.have.key('nextForwardToken') + resp.should.have.key('nextBackwardToken') + for i in range(10): + resp['events'][i]['timestamp'].should.equal(i) + resp['events'][i]['message'].should.equal(str(i)) From a43228c5afdb23b2ca5af1af8ae1ac4a8b665b8c Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Fri, 9 Aug 2019 10:15:56 -0500 Subject: [PATCH 10/42] Refactor validating ELB actions. --- moto/elbv2/models.py | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/moto/elbv2/models.py b/moto/elbv2/models.py index 508541f91..fe009b84f 100644 --- a/moto/elbv2/models.py +++ b/moto/elbv2/models.py @@ -429,6 +429,17 @@ class ELBv2Backend(BaseBackend): if rule.priority == priority: raise PriorityInUseError() + self._validate_actions(actions) + + # TODO: check for error 'TooManyRegistrationsForTargetId' + # TODO: check for error 'TooManyRules' + + # create rule + rule = FakeRule(listener.arn, conditions, priority, actions, is_default=False) + listener.register(rule) + return [rule] + + def _validate_actions(self, actions): # validate Actions target_group_arns = [target_group.arn for target_group in self.target_groups.values()] for i, action in enumerate(actions): @@ -444,14 +455,6 @@ class ELBv2Backend(BaseBackend): else: raise InvalidActionTypeError(action_type, index) - # TODO: check for error 'TooManyRegistrationsForTargetId' - # TODO: check for error 'TooManyRules' - - # create rule - rule = FakeRule(listener.arn, conditions, priority, actions, is_default=False) - listener.register(rule) - return [rule] - def create_target_group(self, name, **kwargs): if len(name) > 32: raise InvalidTargetGroupNameError( @@ -673,20 +676,7 @@ class ELBv2Backend(BaseBackend): # TODO: check pattern of value for 'path-pattern' # validate Actions - target_group_arns = [target_group.arn for target_group in self.target_groups.values()] - if actions: - for i, action in enumerate(actions): - index = i + 1 - action_type = action['type'] - if action_type == 'forward': - action_target_group_arn = action['target_group_arn'] - if action_target_group_arn not in target_group_arns: - raise ActionTargetGroupNotFoundError(action_target_group_arn) - elif action_type == 'redirect': - # nothing to do - pass - else: - raise InvalidActionTypeError(action_type, index) + self._validate_actions(actions) # TODO: check for error 'TooManyRegistrationsForTargetId' # TODO: check for error 'TooManyRules' From 7b3846f6a1a33c55a4b60837a60dac9ef2f04ee4 Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Fri, 9 Aug 2019 23:34:52 -0500 Subject: [PATCH 11/42] Refactor Actions to be a real 
class. Add elb cognito. --- moto/elbv2/models.py | 69 ++++++++++++------ moto/elbv2/responses.py | 47 ++---------- tests/test_elbv2/test_elbv2.py | 129 +++++++++++++++++++++++++++++++++ 3 files changed, 181 insertions(+), 64 deletions(-) diff --git a/moto/elbv2/models.py b/moto/elbv2/models.py index fe009b84f..7e73c7042 100644 --- a/moto/elbv2/models.py +++ b/moto/elbv2/models.py @@ -2,9 +2,11 @@ from __future__ import unicode_literals import datetime import re +from jinja2 import Template from moto.compat import OrderedDict from moto.core.exceptions import RESTError from moto.core import BaseBackend, BaseModel +from moto.core.utils import camelcase_to_underscores from moto.ec2.models import ec2_backends from moto.acm.models import acm_backends from .utils import make_arn_for_target_group @@ -213,13 +215,12 @@ class FakeListener(BaseModel): action_type = action['Type'] if action_type == 'forward': default_actions.append({'type': action_type, 'target_group_arn': action['TargetGroupArn']}) - elif action_type == 'redirect': - redirect_action = {'type': action_type, } - for redirect_config_key, redirect_config_value in action['RedirectConfig'].items(): + elif action_type in ['redirect', 'authenticate-cognito']: + redirect_action = {'type': action_type} + key = 'RedirectConfig' if action_type == 'redirect' else 'AuthenticateCognitoConfig' + for redirect_config_key, redirect_config_value in action[key].items(): # need to match the output of _get_list_prefix - if redirect_config_key == 'StatusCode': - redirect_config_key = 'status_code' - redirect_action['redirect_config._' + redirect_config_key.lower()] = redirect_config_value + redirect_action[camelcase_to_underscores(key) + '._' + camelcase_to_underscores(redirect_config_key)] = redirect_config_value default_actions.append(redirect_action) else: raise InvalidActionTypeError(action_type, i + 1) @@ -231,6 +232,32 @@ class FakeListener(BaseModel): return listener +class FakeAction(BaseModel): + def __init__(self, data): + self.data = data + self.type = data.get("type") + + def to_xml(self): + template = Template("""{{ action.type }} + {% if action.type == "forward" %} + {{ action.data["target_group_arn"] }} + {% elif action.type == "redirect" %} + + {{ action.data["redirect_config._protocol"] }} + {{ action.data["redirect_config._port"] }} + {{ action.data["redirect_config._status_code"] }} + + {% elif action.type == "authenticate-cognito" %} + + {{ action.data["authenticate_cognito_config._user_pool_arn"] }} + {{ action.data["authenticate_cognito_config._user_pool_client_id"] }} + {{ action.data["authenticate_cognito_config._user_pool_domain"] }} + + {% endif %} + """) + return template.render(action=self) + + class FakeRule(BaseModel): def __init__(self, listener_arn, conditions, priority, actions, is_default): @@ -402,6 +429,7 @@ class ELBv2Backend(BaseBackend): return new_load_balancer def create_rule(self, listener_arn, conditions, priority, actions): + actions = [FakeAction(action) for action in actions] listeners = self.describe_listeners(None, [listener_arn]) if not listeners: raise ListenerNotFoundError() @@ -444,13 +472,12 @@ class ELBv2Backend(BaseBackend): target_group_arns = [target_group.arn for target_group in self.target_groups.values()] for i, action in enumerate(actions): index = i + 1 - action_type = action['type'] + action_type = action.type if action_type == 'forward': - action_target_group_arn = action['target_group_arn'] + action_target_group_arn = action.data['target_group_arn'] if action_target_group_arn not in 
target_group_arns: raise ActionTargetGroupNotFoundError(action_target_group_arn) - elif action_type == 'redirect': - # nothing to do + elif action_type in ['redirect', 'authenticate-cognito']: pass else: raise InvalidActionTypeError(action_type, index) @@ -498,26 +525,22 @@ class ELBv2Backend(BaseBackend): return target_group def create_listener(self, load_balancer_arn, protocol, port, ssl_policy, certificate, default_actions): + default_actions = [FakeAction(action) for action in default_actions] balancer = self.load_balancers.get(load_balancer_arn) if balancer is None: raise LoadBalancerNotFoundError() if port in balancer.listeners: raise DuplicateListenerError() + self._validate_actions(default_actions) + arn = load_balancer_arn.replace(':loadbalancer/', ':listener/') + "/%s%s" % (port, id(self)) listener = FakeListener(load_balancer_arn, arn, protocol, port, ssl_policy, certificate, default_actions) balancer.listeners[listener.arn] = listener - for i, action in enumerate(default_actions): - action_type = action['type'] - if action_type == 'forward': - if action['target_group_arn'] in self.target_groups.keys(): - target_group = self.target_groups[action['target_group_arn']] - target_group.load_balancer_arns.append(load_balancer_arn) - elif action_type == 'redirect': - # nothing to do - pass - else: - raise InvalidActionTypeError(action_type, i + 1) + for action in default_actions: + if action.type == 'forward': + target_group = self.target_groups[action.data['target_group_arn']] + target_group.load_balancer_arns.append(load_balancer_arn) return listener @@ -651,6 +674,7 @@ class ELBv2Backend(BaseBackend): raise ListenerNotFoundError() def modify_rule(self, rule_arn, conditions, actions): + actions = [FakeAction(action) for action in actions] # if conditions or actions is empty list, do not update the attributes if not conditions and not actions: raise InvalidModifyRuleArgumentsError() @@ -841,6 +865,7 @@ class ELBv2Backend(BaseBackend): return target_group def modify_listener(self, arn, port=None, protocol=None, ssl_policy=None, certificates=None, default_actions=None): + default_actions = [FakeAction(action) for action in default_actions] for load_balancer in self.load_balancers.values(): if arn in load_balancer.listeners: break @@ -907,7 +932,7 @@ class ELBv2Backend(BaseBackend): for listener in load_balancer.listeners.values(): for rule in listener.rules: for action in rule.actions: - if action.get('target_group_arn') == target_group_arn: + if action.data.get('target_group_arn') == target_group_arn: return True return False diff --git a/moto/elbv2/responses.py b/moto/elbv2/responses.py index c98435440..25c23bb17 100644 --- a/moto/elbv2/responses.py +++ b/moto/elbv2/responses.py @@ -775,16 +775,7 @@ CREATE_LISTENER_TEMPLATE = """{{ action["target_group_arn"] }} - {% elif action["type"] == "redirect" %} - - {{ action["redirect_config._protocol"] }} - {{ action["redirect_config._port"] }} - {{ action["redirect_config._status_code"] }} - - {% endif %} + {{ action.to_xml() }} {% endfor %} @@ -888,16 +879,7 @@ DESCRIBE_RULES_TEMPLATE = """ - {% if action["type"] == "forward" %} - {{ action["target_group_arn"] }} - {% elif action["type"] == "redirect" %} - - {{ action["redirect_config._protocol"] }} - {{ action["redirect_config._port"] }} - {{ action["redirect_config._status_code"] }} - - {% endif %} + {{ action.to_xml() }} {% endfor %} @@ -989,16 +971,7 @@ DESCRIBE_LISTENERS_TEMPLATE = """{{ action["target_group_arn"] }}m - {% elif action["type"] == "redirect" %} - - {{ 
action["redirect_config._protocol"] }} - {{ action["redirect_config._port"] }} - {{ action["redirect_config._status_code"] }} - - {% endif %} + {{ action.to_xml() }} {% endfor %} @@ -1048,8 +1021,7 @@ MODIFY_RULE_TEMPLATE = """ - {{ action["target_group_arn"] }} + {{ action.to_xml() }} {% endfor %} @@ -1432,16 +1404,7 @@ MODIFY_LISTENER_TEMPLATE = """{{ action["target_group_arn"] }} - {% elif action["type"] == "redirect" %} - - {{ action["redirect_config._protocol"] }} - {{ action["redirect_config._port"] }} - {{ action["redirect_config._status_code"] }} - - {% endif %} + {{ action.to_xml() }} {% endfor %} diff --git a/tests/test_elbv2/test_elbv2.py b/tests/test_elbv2/test_elbv2.py index 879a04cd8..36772c02e 100644 --- a/tests/test_elbv2/test_elbv2.py +++ b/tests/test_elbv2/test_elbv2.py @@ -1811,3 +1811,132 @@ def test_redirect_action_listener_rule_cloudformation(): 'Port': '443', 'Protocol': 'HTTPS', 'StatusCode': 'HTTP_301', } },]) + + +@mock_elbv2 +@mock_ec2 +def test_cognito_action_listener_rule(): + conn = boto3.client('elbv2', region_name='us-east-1') + ec2 = boto3.resource('ec2', region_name='us-east-1') + + security_group = ec2.create_security_group( + GroupName='a-security-group', Description='First One') + vpc = ec2.create_vpc(CidrBlock='172.28.7.0/24', InstanceTenancy='default') + subnet1 = ec2.create_subnet( + VpcId=vpc.id, + CidrBlock='172.28.7.192/26', + AvailabilityZone='us-east-1a') + subnet2 = ec2.create_subnet( + VpcId=vpc.id, + CidrBlock='172.28.7.128/26', + AvailabilityZone='us-east-1b') + + response = conn.create_load_balancer( + Name='my-lb', + Subnets=[subnet1.id, subnet2.id], + SecurityGroups=[security_group.id], + Scheme='internal', + Tags=[{'Key': 'key_name', 'Value': 'a_value'}]) + load_balancer_arn = response.get('LoadBalancers')[0].get('LoadBalancerArn') + + action = { + 'Type': 'authenticate-cognito', + 'AuthenticateCognitoConfig': { + 'UserPoolArn': 'arn:aws:cognito-idp:us-east-1:123456789012:userpool/us-east-1_ABCD1234', + 'UserPoolClientId': 'abcd1234abcd', + 'UserPoolDomain': 'testpool', + } + } + response = conn.create_listener(LoadBalancerArn=load_balancer_arn, + Protocol='HTTP', + Port=80, + DefaultActions=[action]) + + listener = response.get('Listeners')[0] + listener.get('DefaultActions')[0].should.equal(action) + listener_arn = listener.get('ListenerArn') + + describe_rules_response = conn.describe_rules(ListenerArn=listener_arn) + describe_rules_response['Rules'][0]['Actions'][0].should.equal(action) + + describe_listener_response = conn.describe_listeners(ListenerArns=[listener_arn, ]) + describe_listener_actions = describe_listener_response['Listeners'][0]['DefaultActions'][0] + describe_listener_actions.should.equal(action) + + +@mock_elbv2 +@mock_cloudformation +def test_cognito_action_listener_rule_cloudformation(): + cnf_conn = boto3.client('cloudformation', region_name='us-east-1') + elbv2_client = boto3.client('elbv2', region_name='us-east-1') + + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "ECS Cluster Test CloudFormation", + "Resources": { + "testVPC": { + "Type": "AWS::EC2::VPC", + "Properties": { + "CidrBlock": "10.0.0.0/16", + }, + }, + "subnet1": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "CidrBlock": "10.0.0.0/24", + "VpcId": {"Ref": "testVPC"}, + "AvalabilityZone": "us-east-1b", + }, + }, + "subnet2": { + "Type": "AWS::EC2::Subnet", + "Properties": { + "CidrBlock": "10.0.1.0/24", + "VpcId": {"Ref": "testVPC"}, + "AvalabilityZone": "us-east-1b", + }, + }, + "testLb": { + "Type": 
"AWS::ElasticLoadBalancingV2::LoadBalancer", + "Properties": { + "Name": "my-lb", + "Subnets": [{"Ref": "subnet1"}, {"Ref": "subnet2"}], + "Type": "application", + "SecurityGroups": [], + } + }, + "testListener": { + "Type": "AWS::ElasticLoadBalancingV2::Listener", + "Properties": { + "LoadBalancerArn": {"Ref": "testLb"}, + "Port": 80, + "Protocol": "HTTP", + "DefaultActions": [{ + "Type": "authenticate-cognito", + "AuthenticateCognitoConfig": { + 'UserPoolArn': 'arn:aws:cognito-idp:us-east-1:123456789012:userpool/us-east-1_ABCD1234', + 'UserPoolClientId': 'abcd1234abcd', + 'UserPoolDomain': 'testpool', + } + }] + } + + } + } + } + template_json = json.dumps(template) + cnf_conn.create_stack(StackName="test-stack", TemplateBody=template_json) + + describe_load_balancers_response = elbv2_client.describe_load_balancers(Names=['my-lb',]) + load_balancer_arn = describe_load_balancers_response['LoadBalancers'][0]['LoadBalancerArn'] + describe_listeners_response = elbv2_client.describe_listeners(LoadBalancerArn=load_balancer_arn) + + describe_listeners_response['Listeners'].should.have.length_of(1) + describe_listeners_response['Listeners'][0]['DefaultActions'].should.equal([{ + 'Type': 'authenticate-cognito', + "AuthenticateCognitoConfig": { + 'UserPoolArn': 'arn:aws:cognito-idp:us-east-1:123456789012:userpool/us-east-1_ABCD1234', + 'UserPoolClientId': 'abcd1234abcd', + 'UserPoolDomain': 'testpool', + } + },]) From 5347a577da36a114cb8931430737bd8d42bf669a Mon Sep 17 00:00:00 2001 From: Earl Robinson Date: Wed, 14 Aug 2019 08:19:32 -0400 Subject: [PATCH 12/42] restore KeyId to kms decrypt response regression introduced in #2071 "KMS generate_data_key" --- moto/kms/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/kms/responses.py b/moto/kms/responses.py index 8cd6e7663..53012b7f8 100644 --- a/moto/kms/responses.py +++ b/moto/kms/responses.py @@ -238,7 +238,7 @@ class KmsResponse(BaseResponse): value = self.parameters.get("CiphertextBlob") try: - return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")}) + return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8"), 'KeyId': 'key_id'}) except UnicodeDecodeError: # Generate data key will produce random bytes which when decrypted is still returned as base64 return json.dumps({"Plaintext": value}) From bbaff4b273f37386f660eb3acde7edac1b04854b Mon Sep 17 00:00:00 2001 From: Earl Robinson Date: Wed, 14 Aug 2019 08:39:54 -0400 Subject: [PATCH 13/42] restore KeyId to test_decrypt in test_kms --- tests/test_kms/test_kms.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_kms/test_kms.py b/tests/test_kms/test_kms.py index 8fe0620f1..f189fbe41 100644 --- a/tests/test_kms/test_kms.py +++ b/tests/test_kms/test_kms.py @@ -191,6 +191,7 @@ def test_decrypt(): conn = boto.kms.connect_to_region('us-west-2') response = conn.decrypt('ZW5jcnlwdG1l'.encode('utf-8')) response['Plaintext'].should.equal(b'encryptme') + response['KeyId'].should.equal('key_id') @mock_kms_deprecated From aa3b6085d1cd535a319c1f08192b5ed1bfa5db6b Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Wed, 14 Aug 2019 16:11:05 -0500 Subject: [PATCH 14/42] Add basic endpoints for EC2 Launch Templates Specifically, add the CreateLaunchTemplate, CreateLaunchTemplateVersion, DescribeLaunchTemplates, and DescribeLaunchTemplateVersions endpoints. 
--- moto/ec2/exceptions.py | 7 + moto/ec2/models.py | 87 ++++++++- moto/ec2/responses/__init__.py | 2 + moto/ec2/responses/launch_templates.py | 243 ++++++++++++++++++++++++ moto/ec2/utils.py | 5 + tests/test_ec2/test_launch_templates.py | 215 +++++++++++++++++++++ 6 files changed, 557 insertions(+), 2 deletions(-) create mode 100644 moto/ec2/responses/launch_templates.py create mode 100644 tests/test_ec2/test_launch_templates.py diff --git a/moto/ec2/exceptions.py b/moto/ec2/exceptions.py index 5d5ccd844..453f75d1d 100644 --- a/moto/ec2/exceptions.py +++ b/moto/ec2/exceptions.py @@ -523,3 +523,10 @@ class OperationNotPermitted3(EC2ClientError): pcx_id, acceptor_region) ) + +class InvalidLaunchTemplateNameError(EC2ClientError): + def __init__(self): + super(InvalidLaunchTemplateNameError, self).__init__( + "InvalidLaunchTemplateName.AlreadyExistsException", + "Launch template name already in use." + ) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index 41a84ec48..2310585ac 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -19,7 +19,8 @@ from boto.ec2.instance import Instance as BotoInstance, Reservation from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType from boto.ec2.spotinstancerequest import SpotInstanceRequest as BotoSpotRequest from boto.ec2.launchspecification import LaunchSpecification - +from xml.etree import ElementTree +from xml.dom import minidom from moto.compat import OrderedDict from moto.core import BaseBackend @@ -49,6 +50,7 @@ from .exceptions import ( InvalidKeyPairDuplicateError, InvalidKeyPairFormatError, InvalidKeyPairNameError, + InvalidLaunchTemplateNameError, InvalidNetworkAclIdError, InvalidNetworkAttachmentIdError, InvalidNetworkInterfaceIdError, @@ -98,6 +100,7 @@ from .utils import ( random_internet_gateway_id, random_ip, random_ipv6_cidr, + random_launch_template_id, random_nat_gateway_id, random_key_pair, random_private_ip, @@ -4112,6 +4115,84 @@ class NatGatewayBackend(object): def delete_nat_gateway(self, nat_gateway_id): return self.nat_gateways.pop(nat_gateway_id) +class LaunchTemplateVersion(object): + def __init__(self, template, number, data, description): + self.template = template + self.number = number + self.data = data + self.description = description + self.create_time = utc_date_and_time() + +class LaunchTemplate(TaggedEC2Resource): + def __init__(self, backend, name, template_data, version_description): + self.ec2_backend = backend + self.name = name + self.id = random_launch_template_id() + self.create_time = utc_date_and_time() + + self.versions = [] + self.create_version(template_data, version_description) + self.default_version_number = 1 + + def create_version(self, data, description): + num = len(self.versions) + 1 + version = LaunchTemplateVersion(self, num, data, description) + self.versions.append(version) + return version + + def is_default(self, version): + return self.default_version == version.number + + def get_version(self, num): + return self.versions[num-1] + + def default_version(self): + return self.versions[self.default_version_number-1] + + def latest_version(self): + return self.versions[-1] + + @property + def latest_version_number(self): + return self.latest_version().number + + def get_filter_value(self, filter_name): + if filter_name == 'launch-template-name': + return self.name + else: + return super(LaunchTemplate, self).get_filter_value( + filter_name, "DescribeLaunchTemplates") + +class LaunchTemplateBackend(object): + def __init__(self): + self.launch_templates_by_name = {} 
+ self.launch_templates_by_id = {} + super(LaunchTemplateBackend, self).__init__() + + def create_launch_template(self, name, description, template_data): + if name in self.launch_templates_by_name: + raise InvalidLaunchTemplateNameError() + template = LaunchTemplate(self, name, template_data, description) + self.launch_templates_by_id[template.id] = template + self.launch_templates_by_name[template.name] = template + return template + + def get_launch_template_by_name(self, name): + return self.launch_templates_by_name[name] + + def get_launch_template_by_id(self, templ_id): + return self.launch_templates_by_id[templ_id] + + def get_launch_templates(self, template_names=None, template_ids=None, filters=None): + if template_ids: + templates = [self.launch_templates_by_id[tid] for tid in template_ids] + elif template_names: + templates = [self.launch_templates_by_name[name] for name in template_names] + else: + templates = list(self.launch_templates_by_name.values()) + + return generic_filter(filters, templates) + class EC2Backend(BaseBackend, InstanceBackend, TagBackend, EBSBackend, RegionsAndZonesBackend, SecurityGroupBackend, AmiBackend, @@ -4122,7 +4203,7 @@ class EC2Backend(BaseBackend, InstanceBackend, TagBackend, EBSBackend, VPCGatewayAttachmentBackend, SpotFleetBackend, SpotRequestBackend, ElasticAddressBackend, KeyPairBackend, DHCPOptionsSetBackend, NetworkAclBackend, VpnGatewayBackend, - CustomerGatewayBackend, NatGatewayBackend): + CustomerGatewayBackend, NatGatewayBackend, LaunchTemplateBackend): def __init__(self, region_name): self.region_name = region_name super(EC2Backend, self).__init__() @@ -4177,6 +4258,8 @@ class EC2Backend(BaseBackend, InstanceBackend, TagBackend, EBSBackend, elif resource_prefix == EC2_RESOURCE_TO_PREFIX['internet-gateway']: self.describe_internet_gateways( internet_gateway_ids=[resource_id]) + elif resource_prefix == EC2_RESOURCE_TO_PREFIX['launch-template']: + self.get_launch_template_by_id(resource_id) elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-acl']: self.get_all_network_acls() elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-interface']: diff --git a/moto/ec2/responses/__init__.py b/moto/ec2/responses/__init__.py index 1222a7ef8..d0648eb50 100644 --- a/moto/ec2/responses/__init__.py +++ b/moto/ec2/responses/__init__.py @@ -14,6 +14,7 @@ from .instances import InstanceResponse from .internet_gateways import InternetGateways from .ip_addresses import IPAddresses from .key_pairs import KeyPairs +from .launch_templates import LaunchTemplates from .monitoring import Monitoring from .network_acls import NetworkACLs from .placement_groups import PlacementGroups @@ -49,6 +50,7 @@ class EC2Response( InternetGateways, IPAddresses, KeyPairs, + LaunchTemplates, Monitoring, NetworkACLs, PlacementGroups, diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py new file mode 100644 index 000000000..ebce54294 --- /dev/null +++ b/moto/ec2/responses/launch_templates.py @@ -0,0 +1,243 @@ +import json +import six +import uuid +from moto.core.responses import BaseResponse +from moto.ec2.models import OWNER_ID +from moto.ec2.exceptions import FilterNotImplementedError +from moto.ec2.utils import filters_from_querystring + +from xml.etree import ElementTree +from xml.dom import minidom + + +def xml_root(name): + root = ElementTree.Element(name, { + "xmlns": "http://ec2.amazonaws.com/doc/2016-11-15/" + }) + request_id = str(uuid.uuid4()) + "example" + ElementTree.SubElement(root, "requestId").text = request_id + + 
return root + +def xml_serialize(tree, key, value): + if key: + name = key[0].lower() + key[1:] + if isinstance(value, list): + if name[-1] == 's': + name = name[:-1] + + name = name + 'Set' + + node = ElementTree.SubElement(tree, name) + else: + node = tree + + if isinstance(value, (str, int, float)): + node.text = str(value) + elif isinstance(value, bool): + node.text = str(value).lower() + elif isinstance(value, dict): + for dictkey, dictvalue in six.iteritems(value): + xml_serialize(node, dictkey, dictvalue) + elif isinstance(value, list): + for item in value: + xml_serialize(node, 'item', item) + +def pretty_xml(tree): + rough = ElementTree.tostring(tree, 'utf-8') + parsed = minidom.parseString(rough) + return parsed.toprettyxml(indent=' ') + +def parse_object(raw_data): + out_data = {} + for key, value in six.iteritems(raw_data): + key_fix_splits = key.split("_") + l = len(key_fix_splits) + + new_key = "" + for i in range(0, l): + new_key += key_fix_splits[i][0].upper() + key_fix_splits[i][1:] + + data = out_data + splits = new_key.split(".") + for split in splits[:-1]: + if split not in data: + data[split] = {} + data = data[split] + + data[splits[-1]] = value + + out_data = parse_lists(out_data) + return out_data + +def parse_lists(data): + for key, value in six.iteritems(data): + if isinstance(value, dict): + keys = data[key].keys() + is_list = all(map(lambda k: k.isnumeric(), keys)) + + if is_list: + new_value = [] + keys = sorted(list(keys)) + for k in keys: + lvalue = value[k] + if isinstance(lvalue, dict): + lvalue = parse_lists(lvalue) + new_value.append(lvalue) + data[key] = new_value + return data + +class LaunchTemplates(BaseResponse): + def create_launch_template(self): + name = self._get_param('LaunchTemplateName') + version_description = self._get_param('VersionDescription') + tag_spec = self._get_param('TagSpecifications') + + raw_template_data = self._get_dict_param('LaunchTemplateData.') + parsed_template_data = parse_object(raw_template_data) + + if tag_spec: + if 'TagSpecifications' not in parsed_template_data: + parsed_template_data['TagSpecifications'] = [] + parsed_template_data['TagSpecifications'].extend(tag_spec) + + if self.is_not_dryrun('CreateLaunchTemplate'): + template = self.ec2_backend.create_launch_template(name, version_description, parsed_template_data) + version = template.default_version() + + tree = xml_root("CreateLaunchTemplateResponse") + xml_serialize(tree, "launchTemplate", { + "createTime": version.create_time, + "createdBy": "arn:aws:iam::{OWNER_ID}:root".format(OWNER_ID=OWNER_ID), + "defaultVersionNumber": template.default_version_number, + "latestVersionNumber": version.number, + "launchTemplateId": template.id, + "launchTemplateName": template.name + }) + + return pretty_xml(tree) + + def create_launch_template_version(self): + name = self._get_param('LaunchTemplateName') + tmpl_id = self._get_param('LaunchTemplateId') + if name: + template = self.ec2_backend.get_launch_template_by_name(name) + if tmpl_id: + template = self.ec2_backend.get_launch_template_by_id(tmpl_id) + + version_description = self._get_param('VersionDescription') + tag_spec = self._get_param('TagSpecifications') + # source_version = self._get_int_param('SourceVersion') + + raw_template_data = self._get_dict_param('LaunchTemplateData.') + template_data = parse_object(raw_template_data) + + if self.is_not_dryrun('CreateLaunchTemplate'): + version = template.create_version(template_data, version_description) + + tree = xml_root("CreateLaunchTemplateVersionResponse") + 
xml_serialize(tree, "launchTemplateVersion", { + "createTime": version.create_time, + "createdBy": "arn:aws:iam::{OWNER_ID}:root".format(OWNER_ID=OWNER_ID), + "defaultVersion": template.is_default(version), + "launchTemplateData": version.data, + "launchTemplateId": template.id, + "launchTemplateName": template.name, + "versionDescription": version.description, + "versionNumber": version.number, + }) + return pretty_xml(tree) + + + # def delete_launch_template(self): + # pass + + # def delete_launch_template_versions(self): + # pass + + def describe_launch_template_versions(self): + name = self._get_param('LaunchTemplateName') + template_id = self._get_param('LaunchTemplateId') + if name: + template = self.ec2_backend.get_launch_template_by_name(name) + if template_id: + template = self.ec2_backend.get_launch_template_by_id(template_id) + + max_results = self._get_int_param("MaxResults", 15) + versions = self._get_multi_param("Versions") + min_version = self._get_int_param("MinVersion") + max_version = self._get_int_param("MaxVersion") + + filters = filters_from_querystring(self.querystring) + if filters: + raise FilterNotImplementedError("all filters", "DescribeLaunchTemplateVersions") + + if self.is_not_dryrun('DescribeLaunchTemplateVersions'): + tree = ElementTree.Element("DescribeLaunchTemplateVersionsResponse", { + "xmlns": "http://ec2.amazonaws.com/doc/2016-11-15/", + }) + request_id = ElementTree.SubElement(tree, "requestId") + request_id.text = "65cadec1-b364-4354-8ca8-4176dexample" + + versions_node = ElementTree.SubElement(tree, "launchTemplateVersionSet") + + ret_versions = [] + if versions: + for v in versions: + ret_versions.append(template.get_version(int(v))) + elif min_version: + if max_version: + vMax = max_version + else: + vMax = min_version + max_results + + ret_versions = template.versions[min_version-1:vMax-1] + elif max_version: + ret_versions = template.versions[0:max_version-1] + else: + ret_versions = template.versions + + ret_versions = ret_versions[:max_results] + + for version in ret_versions: + xml_serialize(versions_node, "item", { + "createTime": version.create_time, + "createdBy": "arn:aws:iam::{OWNER_ID}:root".format(OWNER_ID=OWNER_ID), + "defaultVersion": True, + "launchTemplateData": version.data, + "launchTemplateId": template.id, + "launchTemplateName": template.name, + "versionDescription": version.description, + "versionNumber": version.number, + }) + + return pretty_xml(tree) + + def describe_launch_templates(self): + max_results = self._get_int_param("MaxResults", 15) + template_names = self._get_multi_param("LaunchTemplateName") + template_ids = self._get_multi_param("LaunchTemplateId") + filters = filters_from_querystring(self.querystring) + + if self.is_not_dryrun("DescribeLaunchTemplates"): + tree = ElementTree.Element("DescribeLaunchTemplatesResponse") + templates_node = ElementTree.SubElement(tree, "launchTemplates") + + templates = self.ec2_backend.get_launch_templates(template_names=template_names, template_ids=template_ids, filters=filters) + + templates = templates[:max_results] + + for template in templates: + xml_serialize(templates_node, "item", { + "createTime": template.create_time, + "createdBy": "arn:aws:iam::{OWNER_ID}:root".format(OWNER_ID=OWNER_ID), + "defaultVersionNumber": template.default_version_number, + "latestVersionNumber": template.latest_version_number, + "launchTemplateId": template.id, + "launchTemplateName": template.name, + }) + + return pretty_xml(tree) + + # def modify_launch_template(self): + # pass diff --git 
a/moto/ec2/utils.py b/moto/ec2/utils.py index a998f18ef..e67cb39f4 100644 --- a/moto/ec2/utils.py +++ b/moto/ec2/utils.py @@ -20,6 +20,7 @@ EC2_RESOURCE_TO_PREFIX = { 'image': 'ami', 'instance': 'i', 'internet-gateway': 'igw', + 'launch-template': 'lt', 'nat-gateway': 'nat', 'network-acl': 'acl', 'network-acl-subnet-assoc': 'aclassoc', @@ -161,6 +162,10 @@ def random_nat_gateway_id(): return random_id(prefix=EC2_RESOURCE_TO_PREFIX['nat-gateway'], size=17) +def random_launch_template_id(): + return random_id(prefix=EC2_RESOURCE_TO_PREFIX['launch-template'], size=17) + + def random_public_ip(): return '54.214.{0}.{1}'.format(random.choice(range(255)), random.choice(range(255))) diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py new file mode 100644 index 000000000..e2e160b6f --- /dev/null +++ b/tests/test_ec2/test_launch_templates.py @@ -0,0 +1,215 @@ +import boto3 +import sure # noqa + +from nose.tools import assert_raises +from botocore.client import ClientError + +from moto import mock_ec2 + +@mock_ec2 +def test_launch_template_create(): + cli = boto3.client("ec2") + + resp = cli.create_launch_template( + LaunchTemplateName="test-template", + + # the absolute minimum needed to create a template without other resources + LaunchTemplateData={ + "TagSpecifications": [{ + "ResourceType": "instance", + "Tags": [{ + "Key": "test", + "Value": "value", + }], + }], + }, + ) + + resp.should.have.key("LaunchTemplate") + lt = resp["LaunchTemplate"] + lt["LaunchTemplateName"].should.equal("test-template") + lt["DefaultVersionNumber"].should.equal(1) + lt["LatestVersionNumber"].should.equal(1) + + with assert_raises(ClientError) as ex: + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "TagSpecifications": [{ + "ResourceType": "instance", + "Tags": [{ + "Key": "test", + "Value": "value", + }], + }], + }, + ) + + str(ex.exception).should.equal( + 'An error occurred (InvalidLaunchTemplateName.AlreadyExistsException) when calling the CreateLaunchTemplate operation: Launch template name already in use.') + +@mock_ec2 +def test_describe_launch_template_versions(): + template_data = { + "ImageId": "ami-abc123", + "DisableApiTermination": False, + "TagSpecifications": [{ + "ResourceType": "instance", + "Tags": [{ + "Key": "test", + "Value": "value", + }], + }], + "SecurityGroupIds": [ + "sg-1234", + "sg-ab5678", + ], + } + + cli = boto3.client("ec2") + + create_resp = cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData=template_data) + + # test using name + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + Versions=['1']) + + templ = resp["LaunchTemplateVersions"][0]["LaunchTemplateData"] + templ.should.equal(template_data) + + # test using id + resp = cli.describe_launch_template_versions( + LaunchTemplateId=create_resp["LaunchTemplate"]["LaunchTemplateId"], + Versions=['1']) + + templ = resp["LaunchTemplateVersions"][0]["LaunchTemplateData"] + templ.should.equal(template_data) + +@mock_ec2 +def test_create_launch_template_version(): + cli = boto3.client("ec2") + + create_resp = cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + version_resp = cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + version_resp.should.have.key("LaunchTemplateVersion") + version = 
version_resp["LaunchTemplateVersion"] + version["DefaultVersion"].should.equal(False) + version["LaunchTemplateId"].should.equal(create_resp["LaunchTemplate"]["LaunchTemplateId"]) + version["VersionDescription"].should.equal("new ami") + version["VersionNumber"].should.equal(2) + +@mock_ec2 +def test_describe_template_versions_with_multiple_versions(): + cli = boto3.client("ec2") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template") + + resp["LaunchTemplateVersions"].should.have.length_of(2) + resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-abc123") + resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + +@mock_ec2 +def test_describe_launch_templates(): + cli = boto3.client("ec2") + + lt_ids = [] + r = cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + lt_ids.append(r["LaunchTemplate"]["LaunchTemplateId"]) + + r = cli.create_launch_template( + LaunchTemplateName="test-template2", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + lt_ids.append(r["LaunchTemplate"]["LaunchTemplateId"]) + + # general call, all templates + resp = cli.describe_launch_templates() + resp.should.have.key("LaunchTemplates") + resp["LaunchTemplates"].should.have.length_of(2) + resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("test-template") + resp["LaunchTemplates"][1]["LaunchTemplateName"].should.equal("test-template2") + + # filter by names + resp = cli.describe_launch_templates( + LaunchTemplateNames=["test-template2", "test-template"]) + resp.should.have.key("LaunchTemplates") + resp["LaunchTemplates"].should.have.length_of(2) + resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("test-template2") + resp["LaunchTemplates"][1]["LaunchTemplateName"].should.equal("test-template") + + # filter by ids + resp = cli.describe_launch_templates(LaunchTemplateIds=lt_ids) + resp.should.have.key("LaunchTemplates") + resp["LaunchTemplates"].should.have.length_of(2) + resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("test-template") + resp["LaunchTemplates"][1]["LaunchTemplateName"].should.equal("test-template2") + +@mock_ec2 +def test_describe_launch_templates_with_filters(): + cli = boto3.client("ec2") + + r = cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_tags( + Resources=[r["LaunchTemplate"]["LaunchTemplateId"]], + Tags=[ + {"Key": "tag1", "Value": "a value"}, + {"Key": "another-key", "Value": "this value"}, + ]) + + cli.create_launch_template( + LaunchTemplateName="no-tags", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + resp = cli.describe_launch_templates(Filters=[{ + "Name": "tag:tag1", "Values": ["a value"] + }]) + + resp["LaunchTemplates"].should.have.length_of(1) + resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("test-template") + + resp = cli.describe_launch_templates(Filters=[{ + "Name": "launch-template-name", "Values": ["no-tags"] + }]) + resp["LaunchTemplates"].should.have.length_of(1) + resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("no-tags") + From 
f939531ae9299c8c0b40be84d9bc335dd3b590c4 Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Wed, 14 Aug 2019 16:19:30 -0500 Subject: [PATCH 15/42] Fun with whitespace (flake8 violation fixes) --- moto/ec2/exceptions.py | 1 + moto/ec2/models.py | 9 +++++---- moto/ec2/responses/launch_templates.py | 20 ++++++++++++-------- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/moto/ec2/exceptions.py b/moto/ec2/exceptions.py index 453f75d1d..b7a49cc57 100644 --- a/moto/ec2/exceptions.py +++ b/moto/ec2/exceptions.py @@ -524,6 +524,7 @@ class OperationNotPermitted3(EC2ClientError): acceptor_region) ) + class InvalidLaunchTemplateNameError(EC2ClientError): def __init__(self): super(InvalidLaunchTemplateNameError, self).__init__( diff --git a/moto/ec2/models.py b/moto/ec2/models.py index 2310585ac..3cb0bad93 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -19,8 +19,6 @@ from boto.ec2.instance import Instance as BotoInstance, Reservation from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType from boto.ec2.spotinstancerequest import SpotInstanceRequest as BotoSpotRequest from boto.ec2.launchspecification import LaunchSpecification -from xml.etree import ElementTree -from xml.dom import minidom from moto.compat import OrderedDict from moto.core import BaseBackend @@ -4115,6 +4113,7 @@ class NatGatewayBackend(object): def delete_nat_gateway(self, nat_gateway_id): return self.nat_gateways.pop(nat_gateway_id) + class LaunchTemplateVersion(object): def __init__(self, template, number, data, description): self.template = template @@ -4123,6 +4122,7 @@ class LaunchTemplateVersion(object): self.description = description self.create_time = utc_date_and_time() + class LaunchTemplate(TaggedEC2Resource): def __init__(self, backend, name, template_data, version_description): self.ec2_backend = backend @@ -4144,10 +4144,10 @@ class LaunchTemplate(TaggedEC2Resource): return self.default_version == version.number def get_version(self, num): - return self.versions[num-1] + return self.versions[num - 1] def default_version(self): - return self.versions[self.default_version_number-1] + return self.versions[self.default_version_number - 1] def latest_version(self): return self.versions[-1] @@ -4163,6 +4163,7 @@ class LaunchTemplate(TaggedEC2Resource): return super(LaunchTemplate, self).get_filter_value( filter_name, "DescribeLaunchTemplates") + class LaunchTemplateBackend(object): def __init__(self): self.launch_templates_by_name = {} diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index ebce54294..d1bfaa6d4 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -1,4 +1,3 @@ -import json import six import uuid from moto.core.responses import BaseResponse @@ -19,6 +18,7 @@ def xml_root(name): return root + def xml_serialize(tree, key, value): if key: name = key[0].lower() + key[1:] @@ -43,19 +43,21 @@ def xml_serialize(tree, key, value): for item in value: xml_serialize(node, 'item', item) + def pretty_xml(tree): rough = ElementTree.tostring(tree, 'utf-8') parsed = minidom.parseString(rough) return parsed.toprettyxml(indent=' ') + def parse_object(raw_data): out_data = {} for key, value in six.iteritems(raw_data): key_fix_splits = key.split("_") - l = len(key_fix_splits) + key_len = len(key_fix_splits) new_key = "" - for i in range(0, l): + for i in range(0, key_len): new_key += key_fix_splits[i][0].upper() + key_fix_splits[i][1:] data = out_data @@ -70,6 +72,7 @@ def parse_object(raw_data): 
out_data = parse_lists(out_data) return out_data + def parse_lists(data): for key, value in six.iteritems(data): if isinstance(value, dict): @@ -87,6 +90,7 @@ def parse_lists(data): data[key] = new_value return data + class LaunchTemplates(BaseResponse): def create_launch_template(self): name = self._get_param('LaunchTemplateName') @@ -126,8 +130,6 @@ class LaunchTemplates(BaseResponse): template = self.ec2_backend.get_launch_template_by_id(tmpl_id) version_description = self._get_param('VersionDescription') - tag_spec = self._get_param('TagSpecifications') - # source_version = self._get_int_param('SourceVersion') raw_template_data = self._get_dict_param('LaunchTemplateData.') template_data = parse_object(raw_template_data) @@ -148,7 +150,6 @@ class LaunchTemplates(BaseResponse): }) return pretty_xml(tree) - # def delete_launch_template(self): # pass @@ -191,9 +192,12 @@ class LaunchTemplates(BaseResponse): else: vMax = min_version + max_results - ret_versions = template.versions[min_version-1:vMax-1] + vMin = min_version - 1 + vMax = vMax - 1 + ret_versions = template.versions[vMin:vMax] elif max_version: - ret_versions = template.versions[0:max_version-1] + vMax = max_version - 1 + ret_versions = template.versions[:vMax] else: ret_versions = template.versions From 1de63b1691f4f338ee01a8e0d17affc6853f1c9f Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Wed, 14 Aug 2019 16:32:01 -0500 Subject: [PATCH 16/42] Specify region in launch template tests --- tests/test_ec2/test_launch_templates.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py index e2e160b6f..ae5214c0d 100644 --- a/tests/test_ec2/test_launch_templates.py +++ b/tests/test_ec2/test_launch_templates.py @@ -8,7 +8,7 @@ from moto import mock_ec2 @mock_ec2 def test_launch_template_create(): - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") resp = cli.create_launch_template( LaunchTemplateName="test-template", @@ -66,7 +66,7 @@ def test_describe_launch_template_versions(): ], } - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") create_resp = cli.create_launch_template( LaunchTemplateName="test-template", @@ -90,7 +90,7 @@ def test_describe_launch_template_versions(): @mock_ec2 def test_create_launch_template_version(): - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") create_resp = cli.create_launch_template( LaunchTemplateName="test-template", @@ -114,7 +114,7 @@ def test_create_launch_template_version(): @mock_ec2 def test_describe_template_versions_with_multiple_versions(): - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") cli.create_launch_template( LaunchTemplateName="test-template", @@ -138,7 +138,7 @@ def test_describe_template_versions_with_multiple_versions(): @mock_ec2 def test_describe_launch_templates(): - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") lt_ids = [] r = cli.create_launch_template( @@ -179,7 +179,7 @@ def test_describe_launch_templates(): @mock_ec2 def test_describe_launch_templates_with_filters(): - cli = boto3.client("ec2") + cli = boto3.client("ec2", region_name="us-east-1") r = cli.create_launch_template( LaunchTemplateName="test-template", From 5f80014332a3303d54be7189bba31d7ba10f28af Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Wed, 14 Aug 2019 17:32:59 -0500 Subject: [PATCH 17/42] Serialize unicode as string in python2 --- 
moto/ec2/responses/launch_templates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index d1bfaa6d4..14337d17f 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -32,7 +32,7 @@ def xml_serialize(tree, key, value): else: node = tree - if isinstance(value, (str, int, float)): + if isinstance(value, (str, int, float, six.text_type)): node.text = str(value) elif isinstance(value, bool): node.text = str(value).lower() From ed82264806b0d47146853a683b0ae5a879ef35a0 Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Wed, 14 Aug 2019 17:31:57 -0500 Subject: [PATCH 18/42] Rework LaunchTemplateBackend to be keep only one copy of a template, and be ordered The original LaunchTemplateBackend kept two copies of a template, one for referencing it by name and one for referencing it by id. This change switches to using one copy, by id, and adding a lookup dict for mapping names to ids. Additionally, to fix the python2 test ordering issues, the launch template dict was changed to an OrderedDict. --- moto/ec2/models.py | 33 +++++++++++++++----------- moto/ec2/responses/launch_templates.py | 4 ++-- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/moto/ec2/models.py b/moto/ec2/models.py index 3cb0bad93..10d6f2b28 100644 --- a/moto/ec2/models.py +++ b/moto/ec2/models.py @@ -4166,31 +4166,36 @@ class LaunchTemplate(TaggedEC2Resource): class LaunchTemplateBackend(object): def __init__(self): - self.launch_templates_by_name = {} - self.launch_templates_by_id = {} + self.launch_template_name_to_ids = {} + self.launch_templates = OrderedDict() + self.launch_template_insert_order = [] super(LaunchTemplateBackend, self).__init__() def create_launch_template(self, name, description, template_data): - if name in self.launch_templates_by_name: + if name in self.launch_template_name_to_ids: raise InvalidLaunchTemplateNameError() template = LaunchTemplate(self, name, template_data, description) - self.launch_templates_by_id[template.id] = template - self.launch_templates_by_name[template.name] = template + self.launch_templates[template.id] = template + self.launch_template_name_to_ids[template.name] = template.id + self.launch_template_insert_order.append(template.id) return template - def get_launch_template_by_name(self, name): - return self.launch_templates_by_name[name] + def get_launch_template(self, template_id): + return self.launch_templates[template_id] - def get_launch_template_by_id(self, templ_id): - return self.launch_templates_by_id[templ_id] + def get_launch_template_by_name(self, name): + return self.get_launch_template(self.launch_template_name_to_ids[name]) def get_launch_templates(self, template_names=None, template_ids=None, filters=None): + if template_names and not template_ids: + template_ids = [] + for name in template_names: + template_ids.append(self.launch_template_name_to_ids[name]) + if template_ids: - templates = [self.launch_templates_by_id[tid] for tid in template_ids] - elif template_names: - templates = [self.launch_templates_by_name[name] for name in template_names] + templates = [self.launch_templates[tid] for tid in template_ids] else: - templates = list(self.launch_templates_by_name.values()) + templates = list(self.launch_templates.values()) return generic_filter(filters, templates) @@ -4260,7 +4265,7 @@ class EC2Backend(BaseBackend, InstanceBackend, TagBackend, EBSBackend, self.describe_internet_gateways( 
internet_gateway_ids=[resource_id]) elif resource_prefix == EC2_RESOURCE_TO_PREFIX['launch-template']: - self.get_launch_template_by_id(resource_id) + self.get_launch_template(resource_id) elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-acl']: self.get_all_network_acls() elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-interface']: diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index 14337d17f..ab6f54be1 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -127,7 +127,7 @@ class LaunchTemplates(BaseResponse): if name: template = self.ec2_backend.get_launch_template_by_name(name) if tmpl_id: - template = self.ec2_backend.get_launch_template_by_id(tmpl_id) + template = self.ec2_backend.get_launch_template(tmpl_id) version_description = self._get_param('VersionDescription') @@ -162,7 +162,7 @@ class LaunchTemplates(BaseResponse): if name: template = self.ec2_backend.get_launch_template_by_name(name) if template_id: - template = self.ec2_backend.get_launch_template_by_id(template_id) + template = self.ec2_backend.get_launch_template(template_id) max_results = self._get_int_param("MaxResults", 15) versions = self._get_multi_param("Versions") From 188969a048e01c772121fb8461deb74998e0b90c Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Mon, 29 Jul 2019 16:36:57 -0700 Subject: [PATCH 19/42] AWS Config Aggregator support - Added support for the following APIs: - put_configuration_aggregator - describe_configuration_aggregators - delete_configuration_aggregator - put_aggregation_authorization - describe_aggregation_authorizations - delete_aggregation_authorization --- README.md | 91 ++++++ moto/config/exceptions.py | 83 +++++ moto/config/models.py | 370 +++++++++++++++++++++- moto/config/responses.py | 33 ++ moto/iam/models.py | 1 - setup.py | 4 +- tests/test_config/test_config.py | 520 +++++++++++++++++++++++++++++++ 7 files changed, 1090 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 5a1d7f1f1..4e39ada35 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,7 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L | Cognito Identity Provider | @mock_cognitoidp | basic endpoints done | |-------------------------------------------------------------------------------------| | Config | @mock_config | basic endpoints done | +| | | core endpoints done | |-------------------------------------------------------------------------------------| | Data Pipeline | @mock_datapipeline | basic endpoints done | |-------------------------------------------------------------------------------------| @@ -296,6 +297,96 @@ def test_describe_instances_allowed(): See [the related test suite](https://github.com/spulec/moto/blob/master/tests/test_core/test_auth.py) for more examples. +## Very Important -- Recommended Usage +There are some important caveats to be aware of when using moto: + +*Failure to follow these guidelines could result in your tests mutating your __REAL__ infrastructure!* + +### How do I avoid tests from mutating my real infrastructure? +You need to ensure that the mocks are actually in place. Changes made to recent versions of `botocore` +have altered some of the mock behavior. In short, you need to ensure that you _always_ do the following: + +1. 
Ensure that your tests have dummy environment variables set up:
+
+    export AWS_ACCESS_KEY_ID='testing'
+    export AWS_SECRET_ACCESS_KEY='testing'
+    export AWS_SECURITY_TOKEN='testing'
+    export AWS_SESSION_TOKEN='testing'
+
+1. __VERY IMPORTANT__: ensure that you have your mocks set up __BEFORE__ your `boto3` client is established.
+   This can typically happen if you import a module that has a `boto3` client instantiated outside of a function.
+   See the pesky imports section below on how to work around this.
+
+### Example on usage?
+If you are a user of [pytest](https://pytest.org/en/latest/), you can leverage [pytest fixtures](https://pytest.org/en/latest/fixture.html#fixture)
+to help set up your mocks and other AWS resources that you would need.
+
+Here is an example:
+```python
+@pytest.fixture(scope='function')
+def aws_credentials():
+    """Mocked AWS Credentials for moto."""
+    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
+    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
+    os.environ['AWS_SECURITY_TOKEN'] = 'testing'
+    os.environ['AWS_SESSION_TOKEN'] = 'testing'
+
+@pytest.fixture(scope='function')
+def s3(aws_credentials):
+    with mock_s3():
+        yield boto3.client('s3', region_name='us-east-1')
+
+
+@pytest.fixture(scope='function')
+def sts(aws_credentials):
+    with mock_sts():
+        yield boto3.client('sts', region_name='us-east-1')
+
+
+@pytest.fixture(scope='function')
+def cloudwatch(aws_credentials):
+    with mock_cloudwatch():
+        yield boto3.client('cloudwatch', region_name='us-east-1')
+
+... etc.
+```
+
+In the code sample above, all of the AWS/mocked fixtures take in a parameter of `aws_credentials`,
+which sets the proper fake environment variables. The fake environment variables are used so that `botocore` doesn't try to locate real
+credentials on your system.
+
+Next, once you need to do anything with the mocked AWS environment, do something like:
+```python
+def test_create_bucket(s3):
+    # s3 is a fixture defined above that yields a boto3 s3 client.
+    # Feel free to instantiate another boto3 S3 client -- Keep note of the region though.
+    s3.create_bucket(Bucket="somebucket")
+
+    result = s3.list_buckets()
+    assert len(result['Buckets']) == 1
+    assert result['Buckets'][0]['Name'] == 'somebucket'
+```
+
+### What about those pesky imports?
+Recall earlier, it was mentioned that mocks should be established __BEFORE__ the clients are set up. One way
+to avoid import issues is to make use of local Python imports -- i.e. import the module inside of the unit
+test you want to run vs. importing at the top of the file.
+
+Example:
+```python
+def test_something(s3):
+    from some.package.that.does.something.with.s3 import some_func  # <-- Local import for unit test
+    # ^^ Importing here ensures that the mock has been established.
+
+    some_func()  # The mock has been established from the "s3" pytest fixture, so this function that uses
+                 # a package-level S3 client will properly use the mock and not reach out to AWS.
+```
+
+### Other caveats
+For Tox, Travis CI, and other build systems, you might need to also perform a `touch ~/.aws/credentials`
+command before running the tests. As long as that file is present (empty preferably) and the environment
+variables above are set, you should be good to go.
+
 ## Stand-alone Server Mode
 Moto also has a stand-alone server mode.
This allows you to utilize diff --git a/moto/config/exceptions.py b/moto/config/exceptions.py index b2b01d6a0..25749200f 100644 --- a/moto/config/exceptions.py +++ b/moto/config/exceptions.py @@ -52,6 +52,18 @@ class InvalidResourceTypeException(JsonRESTError): super(InvalidResourceTypeException, self).__init__("ValidationException", message) +class NoSuchConfigurationAggregatorException(JsonRESTError): + code = 400 + + def __init__(self, number=1): + if number == 1: + message = 'The configuration aggregator does not exist. Check the configuration aggregator name and try again.' + else: + message = 'At least one of the configuration aggregators does not exist. Check the configuration aggregator' \ + ' names and try again.' + super(NoSuchConfigurationAggregatorException, self).__init__("NoSuchConfigurationAggregatorException", message) + + class NoSuchConfigurationRecorderException(JsonRESTError): code = 400 @@ -78,6 +90,14 @@ class NoSuchBucketException(JsonRESTError): super(NoSuchBucketException, self).__init__("NoSuchBucketException", message) +class InvalidNextTokenException(JsonRESTError): + code = 400 + + def __init__(self): + message = 'The nextToken provided is invalid' + super(InvalidNextTokenException, self).__init__("InvalidNextTokenException", message) + + class InvalidS3KeyPrefixException(JsonRESTError): code = 400 @@ -147,3 +167,66 @@ class LastDeliveryChannelDeleteFailedException(JsonRESTError): message = 'Failed to delete last specified delivery channel with name \'{name}\', because there, ' \ 'because there is a running configuration recorder.'.format(name=name) super(LastDeliveryChannelDeleteFailedException, self).__init__("LastDeliveryChannelDeleteFailedException", message) + + +class TooManyAccountSources(JsonRESTError): + code = 400 + + def __init__(self, length): + locations = ['com.amazonaws.xyz'] * length + + message = 'Value \'[{locations}]\' at \'accountAggregationSources\' failed to satisfy constraint: ' \ + 'Member must have length less than or equal to 1'.format(locations=', '.join(locations)) + super(TooManyAccountSources, self).__init__("ValidationException", message) + + +class DuplicateTags(JsonRESTError): + code = 400 + + def __init__(self): + super(DuplicateTags, self).__init__( + 'InvalidInput', 'Duplicate tag keys found. 
Please note that Tag keys are case insensitive.') + + +class TagKeyTooBig(JsonRESTError): + code = 400 + + def __init__(self, tag, param='tags.X.member.key'): + super(TagKeyTooBig, self).__init__( + 'ValidationException', "1 validation error detected: Value '{}' at '{}' failed to satisfy " + "constraint: Member must have length less than or equal to 128".format(tag, param)) + + +class TagValueTooBig(JsonRESTError): + code = 400 + + def __init__(self, tag): + super(TagValueTooBig, self).__init__( + 'ValidationException', "1 validation error detected: Value '{}' at 'tags.X.member.value' failed to satisfy " + "constraint: Member must have length less than or equal to 256".format(tag)) + + +class InvalidParameterValueException(JsonRESTError): + code = 400 + + def __init__(self, message): + super(InvalidParameterValueException, self).__init__('InvalidParameterValueException', message) + + +class InvalidTagCharacters(JsonRESTError): + code = 400 + + def __init__(self, tag, param='tags.X.member.key'): + message = "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(tag, param) + message += 'constraint: Member must satisfy regular expression pattern: [\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-@]+' + + super(InvalidTagCharacters, self).__init__('ValidationException', message) + + +class TooManyTags(JsonRESTError): + code = 400 + + def __init__(self, tags, param='tags'): + super(TooManyTags, self).__init__( + 'ValidationException', "1 validation error detected: Value '{}' at '{}' failed to satisfy " + "constraint: Member must have length less than or equal to 50.".format(tags, param)) diff --git a/moto/config/models.py b/moto/config/models.py index cd6e07afa..6541fc981 100644 --- a/moto/config/models.py +++ b/moto/config/models.py @@ -1,6 +1,9 @@ import json +import re import time import pkg_resources +import random +import string from datetime import datetime @@ -12,37 +15,125 @@ from moto.config.exceptions import InvalidResourceTypeException, InvalidDelivery NoSuchConfigurationRecorderException, NoAvailableConfigurationRecorderException, \ InvalidDeliveryChannelNameException, NoSuchBucketException, InvalidS3KeyPrefixException, \ InvalidSNSTopicARNException, MaxNumberOfDeliveryChannelsExceededException, NoAvailableDeliveryChannelException, \ - NoSuchDeliveryChannelException, LastDeliveryChannelDeleteFailedException + NoSuchDeliveryChannelException, LastDeliveryChannelDeleteFailedException, TagKeyTooBig, \ + TooManyTags, TagValueTooBig, TooManyAccountSources, InvalidParameterValueException, InvalidNextTokenException, \ + NoSuchConfigurationAggregatorException, InvalidTagCharacters, DuplicateTags from moto.core import BaseBackend, BaseModel DEFAULT_ACCOUNT_ID = 123456789012 +POP_STRINGS = [ + 'capitalizeStart', + 'CapitalizeStart', + 'capitalizeArn', + 'CapitalizeArn', + 'capitalizeARN', + 'CapitalizeARN' +] +DEFAULT_PAGE_SIZE = 100 def datetime2int(date): return int(time.mktime(date.timetuple())) -def snake_to_camels(original): +def snake_to_camels(original, cap_start, cap_arn): parts = original.split('_') camel_cased = parts[0].lower() + ''.join(p.title() for p in parts[1:]) - camel_cased = camel_cased.replace('Arn', 'ARN') # Config uses 'ARN' instead of 'Arn' + + if cap_arn: + camel_cased = camel_cased.replace('Arn', 'ARN') # Some config services use 'ARN' instead of 'Arn' + + if cap_start: + camel_cased = camel_cased[0].upper() + camel_cased[1::] return camel_cased +def random_string(): + """Returns a random set of 8 lowercase letters for the Config Aggregator ARN""" + chars = [] 
+ for x in range(0, 8): + chars.append(random.choice(string.ascii_lowercase)) + + return "".join(chars) + + +def validate_tag_key(tag_key, exception_param='tags.X.member.key'): + """Validates the tag key. + + :param tag_key: The tag key to check against. + :param exception_param: The exception parameter to send over to help format the message. This is to reflect + the difference between the tag and untag APIs. + :return: + """ + # Validate that the key length is correct: + if len(tag_key) > 128: + raise TagKeyTooBig(tag_key, param=exception_param) + + # Validate that the tag key fits the proper Regex: + # [\w\s_.:/=+\-@]+ SHOULD be the same as the Java regex on the AWS documentation: [\p{L}\p{Z}\p{N}_.:/=+\-@]+ + match = re.findall(r'[\w\s_.:/=+\-@]+', tag_key) + # Kudos if you can come up with a better way of doing a global search :) + if not len(match) or len(match[0]) < len(tag_key): + raise InvalidTagCharacters(tag_key, param=exception_param) + + +def check_tag_duplicate(all_tags, tag_key): + """Validates that a tag key is not a duplicate + + :param all_tags: Dict to check if there is a duplicate tag. + :param tag_key: The tag key to check against. + :return: + """ + if all_tags.get(tag_key): + raise DuplicateTags() + + +def validate_tags(tags): + proper_tags = {} + + if len(tags) > 50: + raise TooManyTags(tags) + + for tag in tags: + # Validate the Key: + validate_tag_key(tag['Key']) + check_tag_duplicate(proper_tags, tag['Key']) + + # Validate the Value: + if len(tag['Value']) > 256: + raise TagValueTooBig(tag['Value']) + + proper_tags[tag['Key']] = tag['Value'] + + return proper_tags + + class ConfigEmptyDictable(BaseModel): """Base class to make serialization easy. This assumes that the sub-class will NOT return 'None's in the JSON.""" + def __init__(self, capitalize_start=False, capitalize_arn=True): + """Assists with the serialization of the config object + :param capitalize_start: For some Config services, the first letter is lowercase -- for others it's capital + :param capitalize_arn: For some Config services, the API expects 'ARN' and for others, it expects 'Arn' + """ + self.capitalize_start = capitalize_start + self.capitalize_arn = capitalize_arn + def to_dict(self): data = {} for item, value in self.__dict__.items(): if value is not None: if isinstance(value, ConfigEmptyDictable): - data[snake_to_camels(item)] = value.to_dict() + data[snake_to_camels(item, self.capitalize_start, self.capitalize_arn)] = value.to_dict() else: - data[snake_to_camels(item)] = value + data[snake_to_camels(item, self.capitalize_start, self.capitalize_arn)] = value + + # Cleanse the extra properties: + for prop in POP_STRINGS: + data.pop(prop, None) return data @@ -50,8 +141,9 @@ class ConfigEmptyDictable(BaseModel): class ConfigRecorderStatus(ConfigEmptyDictable): def __init__(self, name): - self.name = name + super(ConfigRecorderStatus, self).__init__() + self.name = name self.recording = False self.last_start_time = None self.last_stop_time = None @@ -75,12 +167,16 @@ class ConfigRecorderStatus(ConfigEmptyDictable): class ConfigDeliverySnapshotProperties(ConfigEmptyDictable): def __init__(self, delivery_frequency): + super(ConfigDeliverySnapshotProperties, self).__init__() + self.delivery_frequency = delivery_frequency class ConfigDeliveryChannel(ConfigEmptyDictable): def __init__(self, name, s3_bucket_name, prefix=None, sns_arn=None, snapshot_properties=None): + super(ConfigDeliveryChannel, self).__init__() + self.name = name self.s3_bucket_name = s3_bucket_name self.s3_key_prefix = prefix 
@@ -91,6 +187,8 @@ class ConfigDeliveryChannel(ConfigEmptyDictable): class RecordingGroup(ConfigEmptyDictable): def __init__(self, all_supported=True, include_global_resource_types=False, resource_types=None): + super(RecordingGroup, self).__init__() + self.all_supported = all_supported self.include_global_resource_types = include_global_resource_types self.resource_types = resource_types @@ -99,6 +197,8 @@ class RecordingGroup(ConfigEmptyDictable): class ConfigRecorder(ConfigEmptyDictable): def __init__(self, role_arn, recording_group, name='default', status=None): + super(ConfigRecorder, self).__init__() + self.name = name self.role_arn = role_arn self.recording_group = recording_group @@ -109,18 +209,118 @@ class ConfigRecorder(ConfigEmptyDictable): self.status = status +class AccountAggregatorSource(ConfigEmptyDictable): + + def __init__(self, account_ids, aws_regions=None, all_aws_regions=None): + super(AccountAggregatorSource, self).__init__(capitalize_start=True) + + # Can't have both the regions and all_regions flag present -- also can't have them both missing: + if aws_regions and all_aws_regions: + raise InvalidParameterValueException('Your configuration aggregator contains a list of regions and also specifies ' + 'the use of all regions. You must choose one of these options.') + + if not (aws_regions or all_aws_regions): + raise InvalidParameterValueException('Your request does not specify any regions. Select AWS Config-supported ' + 'regions and try again.') + + self.account_ids = account_ids + self.aws_regions = aws_regions + + if not all_aws_regions: + all_aws_regions = False + + self.all_aws_regions = all_aws_regions + + +class OrganizationAggregationSource(ConfigEmptyDictable): + + def __init__(self, role_arn, aws_regions=None, all_aws_regions=None): + super(OrganizationAggregationSource, self).__init__(capitalize_start=True, capitalize_arn=False) + + # Can't have both the regions and all_regions flag present -- also can't have them both missing: + if aws_regions and all_aws_regions: + raise InvalidParameterValueException('Your configuration aggregator contains a list of regions and also specifies ' + 'the use of all regions. You must choose one of these options.') + + if not (aws_regions or all_aws_regions): + raise InvalidParameterValueException('Your request does not specify any regions. Select AWS Config-supported ' + 'regions and try again.') + + self.role_arn = role_arn + self.aws_regions = aws_regions + + if not all_aws_regions: + all_aws_regions = False + + self.all_aws_regions = all_aws_regions + + +class ConfigAggregator(ConfigEmptyDictable): + + def __init__(self, name, region, account_sources=None, org_source=None, tags=None): + super(ConfigAggregator, self).__init__(capitalize_start=True, capitalize_arn=False) + + self.configuration_aggregator_name = name + self.configuration_aggregator_arn = 'arn:aws:config:{region}:{id}:config-aggregator/config-aggregator-{random}'.format( + region=region, + id=DEFAULT_ACCOUNT_ID, + random=random_string() + ) + self.account_aggregation_sources = account_sources + self.organization_aggregation_source = org_source + self.creation_time = datetime2int(datetime.utcnow()) + self.last_updated_time = datetime2int(datetime.utcnow()) + + # Tags are listed in the list_tags_for_resource API call ... not implementing yet -- please feel free to! + self.tags = tags or {} + + # Override the to_dict so that we can format the tags properly... 
+ def to_dict(self): + result = super(ConfigAggregator, self).to_dict() + + # Override the account aggregation sources if present: + if self.account_aggregation_sources: + result['AccountAggregationSources'] = [a.to_dict() for a in self.account_aggregation_sources] + + # Tags are listed in the list_tags_for_resource API call ... not implementing yet -- please feel free to! + # if self.tags: + # result['Tags'] = [{'Key': key, 'Value': value} for key, value in self.tags.items()] + + return result + + +class ConfigAggregationAuthorization(ConfigEmptyDictable): + + def __init__(self, current_region, authorized_account_id, authorized_aws_region, tags=None): + super(ConfigAggregationAuthorization, self).__init__(capitalize_start=True, capitalize_arn=False) + + self.aggregation_authorization_arn = 'arn:aws:config:{region}:{id}:aggregation-authorization/' \ + '{auth_account}/{auth_region}'.format(region=current_region, + id=DEFAULT_ACCOUNT_ID, + auth_account=authorized_account_id, + auth_region=authorized_aws_region) + self.authorized_account_id = authorized_account_id + self.authorized_aws_region = authorized_aws_region + self.creation_time = datetime2int(datetime.utcnow()) + + # Tags are listed in the list_tags_for_resource API call ... not implementing yet -- please feel free to! + self.tags = tags or {} + + class ConfigBackend(BaseBackend): def __init__(self): self.recorders = {} self.delivery_channels = {} + self.config_aggregators = {} + self.aggregation_authorizations = {} @staticmethod def _validate_resource_types(resource_list): # Load the service file: resource_package = 'botocore' resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json')) - conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) + config_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path)) # Verify that each entry exists in the supported list: bad_list = [] @@ -128,11 +328,11 @@ class ConfigBackend(BaseBackend): # For PY2: r_str = str(resource) - if r_str not in conifg_schema['shapes']['ResourceType']['enum']: + if r_str not in config_schema['shapes']['ResourceType']['enum']: bad_list.append(r_str) if bad_list: - raise InvalidResourceTypeException(bad_list, conifg_schema['shapes']['ResourceType']['enum']) + raise InvalidResourceTypeException(bad_list, config_schema['shapes']['ResourceType']['enum']) @staticmethod def _validate_delivery_snapshot_properties(properties): @@ -147,6 +347,158 @@ class ConfigBackend(BaseBackend): raise InvalidDeliveryFrequency(properties.get('deliveryFrequency', None), conifg_schema['shapes']['MaximumExecutionFrequency']['enum']) + def put_configuration_aggregator(self, config_aggregator, region): + # Validate the name: + if len(config_aggregator['ConfigurationAggregatorName']) > 256: + raise NameTooLongException(config_aggregator['ConfigurationAggregatorName'], 'configurationAggregatorName') + + account_sources = None + org_source = None + + # Tag validation: + tags = validate_tags(config_aggregator.get('Tags', [])) + + # Exception if both AccountAggregationSources and OrganizationAggregationSource are supplied: + if config_aggregator.get('AccountAggregationSources') and config_aggregator.get('OrganizationAggregationSource'): + raise InvalidParameterValueException('The configuration aggregator cannot be created because your request contains both the' + ' AccountAggregationSource and the OrganizationAggregationSource. 
Include only ' + 'one aggregation source and try again.') + + # If neither are supplied: + if not config_aggregator.get('AccountAggregationSources') and not config_aggregator.get('OrganizationAggregationSource'): + raise InvalidParameterValueException('The configuration aggregator cannot be created because your request is missing either ' + 'the AccountAggregationSource or the OrganizationAggregationSource. Include the ' + 'appropriate aggregation source and try again.') + + if config_aggregator.get('AccountAggregationSources'): + # Currently, only 1 account aggregation source can be set: + if len(config_aggregator['AccountAggregationSources']) > 1: + raise TooManyAccountSources(len(config_aggregator['AccountAggregationSources'])) + + account_sources = [] + for a in config_aggregator['AccountAggregationSources']: + account_sources.append(AccountAggregatorSource(a['AccountIds'], aws_regions=a.get('AwsRegions'), + all_aws_regions=a.get('AllAwsRegions'))) + + else: + org_source = OrganizationAggregationSource(config_aggregator['OrganizationAggregationSource']['RoleArn'], + aws_regions=config_aggregator['OrganizationAggregationSource'].get('AwsRegions'), + all_aws_regions=config_aggregator['OrganizationAggregationSource'].get( + 'AllAwsRegions')) + + # Grab the existing one if it exists and update it: + if not self.config_aggregators.get(config_aggregator['ConfigurationAggregatorName']): + aggregator = ConfigAggregator(config_aggregator['ConfigurationAggregatorName'], region, account_sources=account_sources, + org_source=org_source, tags=tags) + self.config_aggregators[config_aggregator['ConfigurationAggregatorName']] = aggregator + + else: + aggregator = self.config_aggregators[config_aggregator['ConfigurationAggregatorName']] + aggregator.tags = tags + aggregator.account_aggregation_sources = account_sources + aggregator.organization_aggregation_source = org_source + aggregator.last_updated_time = datetime2int(datetime.utcnow()) + + return aggregator.to_dict() + + def describe_configuration_aggregators(self, names, token, limit): + limit = DEFAULT_PAGE_SIZE if not limit or limit < 0 else limit + agg_list = [] + result = {'ConfigurationAggregators': []} + + if names: + for name in names: + if not self.config_aggregators.get(name): + raise NoSuchConfigurationAggregatorException(number=len(names)) + + agg_list.append(name) + + else: + agg_list = list(self.config_aggregators.keys()) + + # Empty? + if not agg_list: + return result + + # Sort by name: + sorted_aggregators = sorted(agg_list) + + # Get the start: + if not token: + start = 0 + else: + # Tokens for this moto feature are just the next names of the items in the list: + if not self.config_aggregators.get(token): + raise InvalidNextTokenException() + + start = sorted_aggregators.index(token) + + # Get the list of items to collect: + agg_list = sorted_aggregators[start:(start + limit)] + result['ConfigurationAggregators'] = [self.config_aggregators[agg].to_dict() for agg in agg_list] + + if len(sorted_aggregators) > (start + limit): + result['NextToken'] = sorted_aggregators[start + limit] + + return result + + def delete_configuration_aggregator(self, config_aggregator): + if not self.config_aggregators.get(config_aggregator): + raise NoSuchConfigurationAggregatorException() + + del self.config_aggregators[config_aggregator] + + def put_aggregation_authorization(self, current_region, authorized_account, authorized_region, tags): + # Tag validation: + tags = validate_tags(tags or []) + + # Does this already exist? 
+ key = '{}/{}'.format(authorized_account, authorized_region) + agg_auth = self.aggregation_authorizations.get(key) + if not agg_auth: + agg_auth = ConfigAggregationAuthorization(current_region, authorized_account, authorized_region, tags=tags) + self.aggregation_authorizations['{}/{}'.format(authorized_account, authorized_region)] = agg_auth + else: + # Only update the tags: + agg_auth.tags = tags + + return agg_auth.to_dict() + + def describe_aggregation_authorizations(self, token, limit): + limit = DEFAULT_PAGE_SIZE if not limit or limit < 0 else limit + result = {'AggregationAuthorizations': []} + + if not self.aggregation_authorizations: + return result + + # Sort by name: + sorted_authorizations = sorted(self.aggregation_authorizations.keys()) + + # Get the start: + if not token: + start = 0 + else: + # Tokens for this moto feature are just the next names of the items in the list: + if not self.aggregation_authorizations.get(token): + raise InvalidNextTokenException() + + start = sorted_authorizations.index(token) + + # Get the list of items to collect: + auth_list = sorted_authorizations[start:(start + limit)] + result['AggregationAuthorizations'] = [self.aggregation_authorizations[auth].to_dict() for auth in auth_list] + + if len(sorted_authorizations) > (start + limit): + result['NextToken'] = sorted_authorizations[start + limit] + + return result + + def delete_aggregation_authorization(self, authorized_account, authorized_region): + # This will always return a 200 -- regardless if there is or isn't an existing + # aggregation authorization. + key = '{}/{}'.format(authorized_account, authorized_region) + self.aggregation_authorizations.pop(key, None) + def put_configuration_recorder(self, config_recorder): # Validate the name: if not config_recorder.get('name'): diff --git a/moto/config/responses.py b/moto/config/responses.py index 286b2349f..03612d403 100644 --- a/moto/config/responses.py +++ b/moto/config/responses.py @@ -13,6 +13,39 @@ class ConfigResponse(BaseResponse): self.config_backend.put_configuration_recorder(self._get_param('ConfigurationRecorder')) return "" + def put_configuration_aggregator(self): + aggregator = self.config_backend.put_configuration_aggregator(json.loads(self.body), self.region) + schema = {'ConfigurationAggregator': aggregator} + return json.dumps(schema) + + def describe_configuration_aggregators(self): + aggregators = self.config_backend.describe_configuration_aggregators(self._get_param('ConfigurationAggregatorNames'), + self._get_param('NextToken'), + self._get_param('Limit')) + return json.dumps(aggregators) + + def delete_configuration_aggregator(self): + self.config_backend.delete_configuration_aggregator(self._get_param('ConfigurationAggregatorName')) + return "" + + def put_aggregation_authorization(self): + agg_auth = self.config_backend.put_aggregation_authorization(self.region, + self._get_param('AuthorizedAccountId'), + self._get_param('AuthorizedAwsRegion'), + self._get_param('Tags')) + schema = {'AggregationAuthorization': agg_auth} + return json.dumps(schema) + + def describe_aggregation_authorizations(self): + authorizations = self.config_backend.describe_aggregation_authorizations(self._get_param('NextToken'), self._get_param('Limit')) + + return json.dumps(authorizations) + + def delete_aggregation_authorization(self): + self.config_backend.delete_aggregation_authorization(self._get_param('AuthorizedAccountId'), self._get_param('AuthorizedAwsRegion')) + + return "" + def describe_configuration_recorders(self): recorders = 
self.config_backend.describe_configuration_recorders(self._get_param('ConfigurationRecorderNames')) schema = {'ConfigurationRecorders': recorders} diff --git a/moto/iam/models.py b/moto/iam/models.py index bb19b8cad..21bb87e02 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -694,7 +694,6 @@ class IAMBackend(BaseBackend): def _validate_tag_key(self, tag_key, exception_param='tags.X.member.key'): """Validates the tag key. - :param all_tags: Dict to check if there is a duplicate tag. :param tag_key: The tag key to check against. :param exception_param: The exception parameter to send over to help format the message. This is to reflect the difference between the tag and untag APIs. diff --git a/setup.py b/setup.py index 17a4f6691..ff4d9720a 100755 --- a/setup.py +++ b/setup.py @@ -30,8 +30,8 @@ def get_version(): install_requires = [ "Jinja2>=2.10.1", "boto>=2.36.0", - "boto3>=1.9.86", - "botocore>=1.12.86", + "boto3>=1.9.201", + "botocore>=1.12.201", "cryptography>=2.3.0", "requests>=2.5", "xmltodict", diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index 96c62455c..95e88cab1 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -123,6 +123,526 @@ def test_put_configuration_recorder(): assert "maximum number of configuration recorders: 1 is reached." in ce.exception.response['Error']['Message'] +@mock_config +def test_put_configuration_aggregator(): + client = boto3.client('config', region_name='us-west-2') + + # With too many aggregation sources: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + '111111111111', + '222222222222' + ], + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ] + }, + { + 'AccountIds': [ + '012345678910', + '111111111111', + '222222222222' + ], + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ] + } + ] + ) + assert 'Member must have length less than or equal to 1' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # With an invalid region config (no regions defined): + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + '111111111111', + '222222222222' + ], + 'AllAwsRegions': False + } + ] + ) + assert 'Your request does not specify any regions' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + OrganizationAggregationSource={ + 'RoleArn': 'arn:aws:iam::012345678910:role/SomeRole' + } + ) + assert 'Your request does not specify any regions' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + # With both region flags defined: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + '111111111111', + '222222222222' + ], + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ], + 'AllAwsRegions': True + } + ] + ) + assert 'You must choose one of these options' in ce.exception.response['Error']['Message'] + assert 
ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + OrganizationAggregationSource={ + 'RoleArn': 'arn:aws:iam::012345678910:role/SomeRole', + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ], + 'AllAwsRegions': True + } + ) + assert 'You must choose one of these options' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + # Name too long: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='a' * 257, + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ] + ) + assert 'configurationAggregatorName' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Too many tags (>50): + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ], + Tags=[{'Key': '{}'.format(x), 'Value': '{}'.format(x)} for x in range(0, 51)] + ) + assert 'Member must have length less than or equal to 50' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Tag key is too big (>128 chars): + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ], + Tags=[{'Key': 'a' * 129, 'Value': 'a'}] + ) + assert 'Member must have length less than or equal to 128' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Tag value is too big (>256 chars): + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ], + Tags=[{'Key': 'tag', 'Value': 'a' * 257}] + ) + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Duplicate Tags: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ], + Tags=[{'Key': 'a', 'Value': 'a'}, {'Key': 'a', 'Value': 'a'}] + ) + assert 'Duplicate tag keys found.' 
in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidInput' + + # Invalid characters in the tag key: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ], + Tags=[{'Key': '!', 'Value': 'a'}] + ) + assert 'Member must satisfy regular expression pattern:' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # If it contains both the AccountAggregationSources and the OrganizationAggregationSource + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': False + } + ], + OrganizationAggregationSource={ + 'RoleArn': 'arn:aws:iam::012345678910:role/SomeRole', + 'AllAwsRegions': False + } + ) + assert 'AccountAggregationSource and the OrganizationAggregationSource' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + # If it contains neither: + with assert_raises(ClientError) as ce: + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + ) + assert 'AccountAggregationSource or the OrganizationAggregationSource' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidParameterValueException' + + # Just make one: + account_aggregation_source = { + 'AccountIds': [ + '012345678910', + '111111111111', + '222222222222' + ], + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ], + 'AllAwsRegions': False + } + + result = client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[account_aggregation_source], + ) + assert result['ConfigurationAggregator']['ConfigurationAggregatorName'] == 'testing' + assert result['ConfigurationAggregator']['AccountAggregationSources'] == [account_aggregation_source] + assert 'arn:aws:config:us-west-2:123456789012:config-aggregator/config-aggregator-' in \ + result['ConfigurationAggregator']['ConfigurationAggregatorArn'] + assert result['ConfigurationAggregator']['CreationTime'] == result['ConfigurationAggregator']['LastUpdatedTime'] + + # Update the existing one: + original_arn = result['ConfigurationAggregator']['ConfigurationAggregatorArn'] + account_aggregation_source.pop('AwsRegions') + account_aggregation_source['AllAwsRegions'] = True + result = client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[account_aggregation_source] + ) + + assert result['ConfigurationAggregator']['ConfigurationAggregatorName'] == 'testing' + assert result['ConfigurationAggregator']['AccountAggregationSources'] == [account_aggregation_source] + assert result['ConfigurationAggregator']['ConfigurationAggregatorArn'] == original_arn + + # Make an org one: + result = client.put_configuration_aggregator( + ConfigurationAggregatorName='testingOrg', + OrganizationAggregationSource={ + 'RoleArn': 'arn:aws:iam::012345678910:role/SomeRole', + 'AwsRegions': ['us-east-1', 'us-west-2'] + } + ) + + assert result['ConfigurationAggregator']['ConfigurationAggregatorName'] == 'testingOrg' + assert result['ConfigurationAggregator']['OrganizationAggregationSource'] == { + 'RoleArn': 
'arn:aws:iam::012345678910:role/SomeRole', + 'AwsRegions': [ + 'us-east-1', + 'us-west-2' + ], + 'AllAwsRegions': False + } + + +@mock_config +def test_describe_configuration_aggregators(): + client = boto3.client('config', region_name='us-west-2') + + # Without any config aggregators: + assert not client.describe_configuration_aggregators()['ConfigurationAggregators'] + + # Make 10 config aggregators: + for x in range(0, 10): + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing{}'.format(x), + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ] + ) + + # Describe with an incorrect name: + with assert_raises(ClientError) as ce: + client.describe_configuration_aggregators(ConfigurationAggregatorNames=['DoesNotExist']) + assert 'The configuration aggregator does not exist.' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationAggregatorException' + + # Error describe with more than 1 item in the list: + with assert_raises(ClientError) as ce: + client.describe_configuration_aggregators(ConfigurationAggregatorNames=['testing0', 'DoesNotExist']) + assert 'At least one of the configuration aggregators does not exist.' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationAggregatorException' + + # Get the normal list: + result = client.describe_configuration_aggregators() + assert not result.get('NextToken') + assert len(result['ConfigurationAggregators']) == 10 + + # Test filtered list: + agg_names = ['testing0', 'testing1', 'testing2'] + result = client.describe_configuration_aggregators(ConfigurationAggregatorNames=agg_names) + assert not result.get('NextToken') + assert len(result['ConfigurationAggregators']) == 3 + assert [agg['ConfigurationAggregatorName'] for agg in result['ConfigurationAggregators']] == agg_names + + # Test Pagination: + result = client.describe_configuration_aggregators(Limit=4) + assert len(result['ConfigurationAggregators']) == 4 + assert result['NextToken'] == 'testing4' + assert [agg['ConfigurationAggregatorName'] for agg in result['ConfigurationAggregators']] == \ + ['testing{}'.format(x) for x in range(0, 4)] + result = client.describe_configuration_aggregators(Limit=4, NextToken='testing4') + assert len(result['ConfigurationAggregators']) == 4 + assert result['NextToken'] == 'testing8' + assert [agg['ConfigurationAggregatorName'] for agg in result['ConfigurationAggregators']] == \ + ['testing{}'.format(x) for x in range(4, 8)] + result = client.describe_configuration_aggregators(Limit=4, NextToken='testing8') + assert len(result['ConfigurationAggregators']) == 2 + assert not result.get('NextToken') + assert [agg['ConfigurationAggregatorName'] for agg in result['ConfigurationAggregators']] == \ + ['testing{}'.format(x) for x in range(8, 10)] + + # Test Pagination with Filtering: + result = client.describe_configuration_aggregators(ConfigurationAggregatorNames=['testing2', 'testing4'], Limit=1) + assert len(result['ConfigurationAggregators']) == 1 + assert result['NextToken'] == 'testing4' + assert result['ConfigurationAggregators'][0]['ConfigurationAggregatorName'] == 'testing2' + result = client.describe_configuration_aggregators(ConfigurationAggregatorNames=['testing2', 'testing4'], Limit=1, NextToken='testing4') + assert not result.get('NextToken') + assert result['ConfigurationAggregators'][0]['ConfigurationAggregatorName'] == 'testing4' + + # Test with an 
invalid filter: + with assert_raises(ClientError) as ce: + client.describe_configuration_aggregators(NextToken='WRONG') + assert 'The nextToken provided is invalid' == ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidNextTokenException' + + +@mock_config +def test_put_aggregation_authorization(): + client = boto3.client('config', region_name='us-west-2') + + # Too many tags (>50): + with assert_raises(ClientError) as ce: + client.put_aggregation_authorization( + AuthorizedAccountId='012345678910', + AuthorizedAwsRegion='us-west-2', + Tags=[{'Key': '{}'.format(x), 'Value': '{}'.format(x)} for x in range(0, 51)] + ) + assert 'Member must have length less than or equal to 50' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Tag key is too big (>128 chars): + with assert_raises(ClientError) as ce: + client.put_aggregation_authorization( + AuthorizedAccountId='012345678910', + AuthorizedAwsRegion='us-west-2', + Tags=[{'Key': 'a' * 129, 'Value': 'a'}] + ) + assert 'Member must have length less than or equal to 128' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Tag value is too big (>256 chars): + with assert_raises(ClientError) as ce: + client.put_aggregation_authorization( + AuthorizedAccountId='012345678910', + AuthorizedAwsRegion='us-west-2', + Tags=[{'Key': 'tag', 'Value': 'a' * 257}] + ) + assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Duplicate Tags: + with assert_raises(ClientError) as ce: + client.put_aggregation_authorization( + AuthorizedAccountId='012345678910', + AuthorizedAwsRegion='us-west-2', + Tags=[{'Key': 'a', 'Value': 'a'}, {'Key': 'a', 'Value': 'a'}] + ) + assert 'Duplicate tag keys found.' 
in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidInput' + + # Invalid characters in the tag key: + with assert_raises(ClientError) as ce: + client.put_aggregation_authorization( + AuthorizedAccountId='012345678910', + AuthorizedAwsRegion='us-west-2', + Tags=[{'Key': '!', 'Value': 'a'}] + ) + assert 'Member must satisfy regular expression pattern:' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'ValidationException' + + # Put a normal one there: + result = client.put_aggregation_authorization(AuthorizedAccountId='012345678910', AuthorizedAwsRegion='us-east-1', + Tags=[{'Key': 'tag', 'Value': 'a'}]) + + assert result['AggregationAuthorization']['AggregationAuthorizationArn'] == 'arn:aws:config:us-west-2:123456789012:' \ + 'aggregation-authorization/012345678910/us-east-1' + assert result['AggregationAuthorization']['AuthorizedAccountId'] == '012345678910' + assert result['AggregationAuthorization']['AuthorizedAwsRegion'] == 'us-east-1' + assert isinstance(result['AggregationAuthorization']['CreationTime'], datetime) + + creation_date = result['AggregationAuthorization']['CreationTime'] + + # And again: + result = client.put_aggregation_authorization(AuthorizedAccountId='012345678910', AuthorizedAwsRegion='us-east-1') + assert result['AggregationAuthorization']['AggregationAuthorizationArn'] == 'arn:aws:config:us-west-2:123456789012:' \ + 'aggregation-authorization/012345678910/us-east-1' + assert result['AggregationAuthorization']['AuthorizedAccountId'] == '012345678910' + assert result['AggregationAuthorization']['AuthorizedAwsRegion'] == 'us-east-1' + assert result['AggregationAuthorization']['CreationTime'] == creation_date + + +@mock_config +def test_describe_aggregation_authorizations(): + client = boto3.client('config', region_name='us-west-2') + + # With no aggregation authorizations: + assert not client.describe_aggregation_authorizations()['AggregationAuthorizations'] + + # Make 10 account authorizations: + for i in range(0, 10): + client.put_aggregation_authorization(AuthorizedAccountId='{}'.format(str(i) * 12), AuthorizedAwsRegion='us-west-2') + + result = client.describe_aggregation_authorizations() + assert len(result['AggregationAuthorizations']) == 10 + assert not result.get('NextToken') + for i in range(0, 10): + assert result['AggregationAuthorizations'][i]['AuthorizedAccountId'] == str(i) * 12 + + # Test Pagination: + result = client.describe_aggregation_authorizations(Limit=4) + assert len(result['AggregationAuthorizations']) == 4 + assert result['NextToken'] == ('4' * 12) + '/us-west-2' + assert [auth['AuthorizedAccountId'] for auth in result['AggregationAuthorizations']] == ['{}'.format(str(x) * 12) for x in range(0, 4)] + + result = client.describe_aggregation_authorizations(Limit=4, NextToken=('4' * 12) + '/us-west-2') + assert len(result['AggregationAuthorizations']) == 4 + assert result['NextToken'] == ('8' * 12) + '/us-west-2' + assert [auth['AuthorizedAccountId'] for auth in result['AggregationAuthorizations']] == ['{}'.format(str(x) * 12) for x in range(4, 8)] + + result = client.describe_aggregation_authorizations(Limit=4, NextToken=('8' * 12) + '/us-west-2') + assert len(result['AggregationAuthorizations']) == 2 + assert not result.get('NextToken') + assert [auth['AuthorizedAccountId'] for auth in result['AggregationAuthorizations']] == ['{}'.format(str(x) * 12) for x in range(8, 10)] + + # Test with an invalid filter: + with assert_raises(ClientError) as ce: 
+ client.describe_aggregation_authorizations(NextToken='WRONG') + assert 'The nextToken provided is invalid' == ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'InvalidNextTokenException' + + +@mock_config +def test_delete_aggregation_authorization(): + client = boto3.client('config', region_name='us-west-2') + + client.put_aggregation_authorization(AuthorizedAccountId='012345678910', AuthorizedAwsRegion='us-west-2') + + # Delete it: + client.delete_aggregation_authorization(AuthorizedAccountId='012345678910', AuthorizedAwsRegion='us-west-2') + + # Verify that none are there: + assert not client.describe_aggregation_authorizations()['AggregationAuthorizations'] + + # Try it again -- nothing should happen: + client.delete_aggregation_authorization(AuthorizedAccountId='012345678910', AuthorizedAwsRegion='us-west-2') + + +@mock_config +def test_delete_configuration_aggregator(): + client = boto3.client('config', region_name='us-west-2') + client.put_configuration_aggregator( + ConfigurationAggregatorName='testing', + AccountAggregationSources=[ + { + 'AccountIds': [ + '012345678910', + ], + 'AllAwsRegions': True + } + ] + ) + + client.delete_configuration_aggregator(ConfigurationAggregatorName='testing') + + # And again to confirm that it's deleted: + with assert_raises(ClientError) as ce: + client.delete_configuration_aggregator(ConfigurationAggregatorName='testing') + assert 'The configuration aggregator does not exist.' in ce.exception.response['Error']['Message'] + assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationAggregatorException' + + @mock_config def test_describe_configurations(): client = boto3.client('config', region_name='us-west-2') From 154b4ef84483626d028590e6e22c0ce9901abaaa Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Mon, 19 Aug 2019 17:54:35 -0500 Subject: [PATCH 20/42] Simplify xml_serialize, warn when unknown type used --- moto/ec2/responses/launch_templates.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index ab6f54be1..36c10eea1 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -20,28 +20,27 @@ def xml_root(name): def xml_serialize(tree, key, value): - if key: - name = key[0].lower() + key[1:] - if isinstance(value, list): - if name[-1] == 's': - name = name[:-1] + name = key[0].lower() + key[1:] + if isinstance(value, list): + if name[-1] == 's': + name = name[:-1] - name = name + 'Set' + name = name + 'Set' - node = ElementTree.SubElement(tree, name) - else: - node = tree + node = ElementTree.SubElement(tree, name) if isinstance(value, (str, int, float, six.text_type)): node.text = str(value) - elif isinstance(value, bool): - node.text = str(value).lower() elif isinstance(value, dict): for dictkey, dictvalue in six.iteritems(value): xml_serialize(node, dictkey, dictvalue) elif isinstance(value, list): for item in value: xml_serialize(node, 'item', item) + elif value == None: + pass + else: + raise NotImplementedError("Don't know how to serialize \"{}\" to xml".format(value.__class__)) def pretty_xml(tree): From 743e5be4d3368fd4f69b31bbf4da2bcb58c9a6b8 Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Mon, 19 Aug 2019 17:57:39 -0500 Subject: [PATCH 21/42] Confirm describe_launch_template_versions works with Versions, MinVersion, and MaxVersion options --- moto/ec2/responses/launch_templates.py | 6 +- 
tests/test_ec2/test_launch_templates.py | 143 ++++++++++++++++++++++++ 2 files changed, 146 insertions(+), 3 deletions(-) diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index 36c10eea1..870a8be74 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -164,10 +164,11 @@ class LaunchTemplates(BaseResponse): template = self.ec2_backend.get_launch_template(template_id) max_results = self._get_int_param("MaxResults", 15) - versions = self._get_multi_param("Versions") + versions = self._get_multi_param("LaunchTemplateVersion") min_version = self._get_int_param("MinVersion") max_version = self._get_int_param("MaxVersion") + filters = filters_from_querystring(self.querystring) if filters: raise FilterNotImplementedError("all filters", "DescribeLaunchTemplateVersions") @@ -192,10 +193,9 @@ class LaunchTemplates(BaseResponse): vMax = min_version + max_results vMin = min_version - 1 - vMax = vMax - 1 ret_versions = template.versions[vMin:vMax] elif max_version: - vMax = max_version - 1 + vMax = max_version ret_versions = template.versions[:vMax] else: ret_versions = template.versions diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py index ae5214c0d..afe0488ce 100644 --- a/tests/test_ec2/test_launch_templates.py +++ b/tests/test_ec2/test_launch_templates.py @@ -48,6 +48,7 @@ def test_launch_template_create(): str(ex.exception).should.equal( 'An error occurred (InvalidLaunchTemplateName.AlreadyExistsException) when calling the CreateLaunchTemplate operation: Launch template name already in use.') + @mock_ec2 def test_describe_launch_template_versions(): template_data = { @@ -136,6 +137,148 @@ def test_describe_template_versions_with_multiple_versions(): resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-abc123") resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + +@mock_ec2 +def test_describe_launch_template_versions_with_versions_option(): + cli = boto3.client("ec2", region_name="us-east-1") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-hij789" + }, + VersionDescription="new ami, again") + + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + Versions=["2","3"]) + + resp["LaunchTemplateVersions"].should.have.length_of(2) + resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-hij789") + + +@mock_ec2 +def test_describe_launch_template_versions_with_min(): + cli = boto3.client("ec2", region_name="us-east-1") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-hij789" + }, + VersionDescription="new ami, again") + + resp = 
cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + MinVersion="2") + + resp["LaunchTemplateVersions"].should.have.length_of(2) + resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-hij789") + + +@mock_ec2 +def test_describe_launch_template_versions_with_max(): + cli = boto3.client("ec2", region_name="us-east-1") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-hij789" + }, + VersionDescription="new ami, again") + + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + MaxVersion="2") + + resp["LaunchTemplateVersions"].should.have.length_of(2) + resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-abc123") + resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + + +@mock_ec2 +def test_describe_launch_template_versions_with_min_and_max(): + cli = boto3.client("ec2", region_name="us-east-1") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-hij789" + }, + VersionDescription="new ami, again") + + cli.create_launch_template_version( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-345abc" + }, + VersionDescription="new ami, because why not") + + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + MinVersion="2", + MaxVersion="3") + + resp["LaunchTemplateVersions"].should.have.length_of(2) + resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") + resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-hij789") + + + @mock_ec2 def test_describe_launch_templates(): cli = boto3.client("ec2", region_name="us-east-1") From a1aa08771850b27afafaaa1c821528b02adf5d9c Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Mon, 19 Aug 2019 17:58:19 -0500 Subject: [PATCH 22/42] Add test for creating launch templates with TagSpecifications option --- moto/ec2/responses/launch_templates.py | 18 ++++++++++----- tests/test_ec2/test_launch_templates.py | 29 +++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index 870a8be74..4863492bb 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -94,17 +94,25 @@ class LaunchTemplates(BaseResponse): def create_launch_template(self): name = self._get_param('LaunchTemplateName') version_description = self._get_param('VersionDescription') - tag_spec = self._get_param('TagSpecifications') + tag_spec = self._parse_tag_specification("TagSpecification") raw_template_data = self._get_dict_param('LaunchTemplateData.') parsed_template_data 
= parse_object(raw_template_data) - if tag_spec: - if 'TagSpecifications' not in parsed_template_data: - parsed_template_data['TagSpecifications'] = [] - parsed_template_data['TagSpecifications'].extend(tag_spec) if self.is_not_dryrun('CreateLaunchTemplate'): + if tag_spec: + if 'TagSpecifications' not in parsed_template_data: + parsed_template_data['TagSpecifications'] = [] + converted_tag_spec = [] + for resource_type, tags in six.iteritems(tag_spec): + converted_tag_spec.append({ + "ResourceType": resource_type, + "Tags": [{"Key": key, "Value": value} for key, value in six.iteritems(tags)], + }) + + parsed_template_data['TagSpecifications'].extend(converted_tag_spec) + template = self.ec2_backend.create_launch_template(name, version_description, parsed_template_data) version = template.default_version() diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py index afe0488ce..0cdd7ae31 100644 --- a/tests/test_ec2/test_launch_templates.py +++ b/tests/test_ec2/test_launch_templates.py @@ -356,3 +356,32 @@ def test_describe_launch_templates_with_filters(): resp["LaunchTemplates"].should.have.length_of(1) resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("no-tags") + +@mock_ec2 +def test_create_launch_template_with_tag_spec(): + cli = boto3.client("ec2", region_name="us-east-1") + + cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={"ImageId":"ami-abc123"}, + TagSpecifications=[{ + "ResourceType": "instance", + "Tags": [ + {"Key": "key", "Value": "value"} + ] + }], + ) + + resp = cli.describe_launch_template_versions( + LaunchTemplateName="test-template", + Versions=["1"]) + version = resp["LaunchTemplateVersions"][0] + + version["LaunchTemplateData"].should.have.key("TagSpecifications") + version["LaunchTemplateData"]["TagSpecifications"].should.have.length_of(1) + version["LaunchTemplateData"]["TagSpecifications"][0].should.equal({ + "ResourceType": "instance", + "Tags": [ + {"Key": "key", "Value": "value"} + ] + }) From 4929298f1f339a0e9110a4ce87b2f1052e08d9cb Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Mon, 19 Aug 2019 17:58:48 -0500 Subject: [PATCH 23/42] Test create_launch_template_version using launch_template id --- tests/test_ec2/test_launch_templates.py | 27 ++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py index 0cdd7ae31..bac98f8fc 100644 --- a/tests/test_ec2/test_launch_templates.py +++ b/tests/test_ec2/test_launch_templates.py @@ -114,7 +114,32 @@ def test_create_launch_template_version(): version["VersionNumber"].should.equal(2) @mock_ec2 -def test_describe_template_versions_with_multiple_versions(): +def test_create_launch_template_version_by_id(): + cli = boto3.client("ec2", region_name="us-east-1") + + create_resp = cli.create_launch_template( + LaunchTemplateName="test-template", + LaunchTemplateData={ + "ImageId": "ami-abc123" + }) + + version_resp = cli.create_launch_template_version( + LaunchTemplateId=create_resp["LaunchTemplate"]["LaunchTemplateId"], + LaunchTemplateData={ + "ImageId": "ami-def456" + }, + VersionDescription="new ami") + + version_resp.should.have.key("LaunchTemplateVersion") + version = version_resp["LaunchTemplateVersion"] + version["DefaultVersion"].should.equal(False) + version["LaunchTemplateId"].should.equal(create_resp["LaunchTemplate"]["LaunchTemplateId"]) + version["VersionDescription"].should.equal("new ami") + 
version["VersionNumber"].should.equal(2) + + +@mock_ec2 +def test_describe_launch_template_versions_with_multiple_versions(): cli = boto3.client("ec2", region_name="us-east-1") cli.create_launch_template( From d2ce3a9e043ba8f7b22eea6cda1a5c09624fc354 Mon Sep 17 00:00:00 2001 From: Don Kuntz Date: Mon, 19 Aug 2019 18:01:44 -0500 Subject: [PATCH 24/42] Flake8 fixes --- moto/ec2/responses/launch_templates.py | 4 +--- tests/test_ec2/test_launch_templates.py | 9 ++++++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/moto/ec2/responses/launch_templates.py b/moto/ec2/responses/launch_templates.py index 4863492bb..a8d92a928 100644 --- a/moto/ec2/responses/launch_templates.py +++ b/moto/ec2/responses/launch_templates.py @@ -37,7 +37,7 @@ def xml_serialize(tree, key, value): elif isinstance(value, list): for item in value: xml_serialize(node, 'item', item) - elif value == None: + elif value is None: pass else: raise NotImplementedError("Don't know how to serialize \"{}\" to xml".format(value.__class__)) @@ -99,7 +99,6 @@ class LaunchTemplates(BaseResponse): raw_template_data = self._get_dict_param('LaunchTemplateData.') parsed_template_data = parse_object(raw_template_data) - if self.is_not_dryrun('CreateLaunchTemplate'): if tag_spec: if 'TagSpecifications' not in parsed_template_data: @@ -176,7 +175,6 @@ class LaunchTemplates(BaseResponse): min_version = self._get_int_param("MinVersion") max_version = self._get_int_param("MaxVersion") - filters = filters_from_querystring(self.querystring) if filters: raise FilterNotImplementedError("all filters", "DescribeLaunchTemplateVersions") diff --git a/tests/test_ec2/test_launch_templates.py b/tests/test_ec2/test_launch_templates.py index bac98f8fc..87e1d3986 100644 --- a/tests/test_ec2/test_launch_templates.py +++ b/tests/test_ec2/test_launch_templates.py @@ -6,6 +6,7 @@ from botocore.client import ClientError from moto import mock_ec2 + @mock_ec2 def test_launch_template_create(): cli = boto3.client("ec2", region_name="us-east-1") @@ -89,6 +90,7 @@ def test_describe_launch_template_versions(): templ = resp["LaunchTemplateVersions"][0]["LaunchTemplateData"] templ.should.equal(template_data) + @mock_ec2 def test_create_launch_template_version(): cli = boto3.client("ec2", region_name="us-east-1") @@ -113,6 +115,7 @@ def test_create_launch_template_version(): version["VersionDescription"].should.equal("new ami") version["VersionNumber"].should.equal(2) + @mock_ec2 def test_create_launch_template_version_by_id(): cli = boto3.client("ec2", region_name="us-east-1") @@ -189,7 +192,7 @@ def test_describe_launch_template_versions_with_versions_option(): resp = cli.describe_launch_template_versions( LaunchTemplateName="test-template", - Versions=["2","3"]) + Versions=["2", "3"]) resp["LaunchTemplateVersions"].should.have.length_of(2) resp["LaunchTemplateVersions"][0]["LaunchTemplateData"]["ImageId"].should.equal("ami-def456") @@ -303,7 +306,6 @@ def test_describe_launch_template_versions_with_min_and_max(): resp["LaunchTemplateVersions"][1]["LaunchTemplateData"]["ImageId"].should.equal("ami-hij789") - @mock_ec2 def test_describe_launch_templates(): cli = boto3.client("ec2", region_name="us-east-1") @@ -345,6 +347,7 @@ def test_describe_launch_templates(): resp["LaunchTemplates"][0]["LaunchTemplateName"].should.equal("test-template") resp["LaunchTemplates"][1]["LaunchTemplateName"].should.equal("test-template2") + @mock_ec2 def test_describe_launch_templates_with_filters(): cli = boto3.client("ec2", region_name="us-east-1") @@ -388,7 +391,7 @@ def 
test_create_launch_template_with_tag_spec(): cli.create_launch_template( LaunchTemplateName="test-template", - LaunchTemplateData={"ImageId":"ami-abc123"}, + LaunchTemplateData={"ImageId": "ami-abc123"}, TagSpecifications=[{ "ResourceType": "instance", "Tags": [ From 66a7ace2c56e0ac2e9ba12dedaf23f51560b3570 Mon Sep 17 00:00:00 2001 From: Asher Foa <1268088+asherf@users.noreply.github.com> Date: Mon, 19 Aug 2019 14:56:34 -0700 Subject: [PATCH 25/42] Use the specified region name when generating ARN for a requested cert. --- moto/acm/models.py | 6 +++--- tests/test_acm/test_acm.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/moto/acm/models.py b/moto/acm/models.py index 15a1bd44d..6acd91f85 100644 --- a/moto/acm/models.py +++ b/moto/acm/models.py @@ -105,7 +105,7 @@ class CertBundle(BaseModel): self.arn = arn @classmethod - def generate_cert(cls, domain_name, sans=None): + def generate_cert(cls, domain_name, region, sans=None): if sans is None: sans = set() else: @@ -152,7 +152,7 @@ class CertBundle(BaseModel): encryption_algorithm=serialization.NoEncryption() ) - return cls(cert_armored, private_key, cert_type='AMAZON_ISSUED', cert_status='PENDING_VALIDATION') + return cls(cert_armored, private_key, cert_type='AMAZON_ISSUED', cert_status='PENDING_VALIDATION', region=region) def validate_pk(self): try: @@ -355,7 +355,7 @@ class AWSCertificateManagerBackend(BaseBackend): if arn is not None: return arn - cert = CertBundle.generate_cert(domain_name, subject_alt_names) + cert = CertBundle.generate_cert(domain_name, region=self.region, sans=subject_alt_names) if idempotency_token is not None: self._set_idempotency_token_arn(idempotency_token, cert.arn) self._certificates[cert.arn] = cert diff --git a/tests/test_acm/test_acm.py b/tests/test_acm/test_acm.py index ccac48181..99123c4bb 100644 --- a/tests/test_acm/test_acm.py +++ b/tests/test_acm/test_acm.py @@ -291,6 +291,7 @@ def test_request_certificate(): ) resp.should.contain('CertificateArn') arn = resp['CertificateArn'] + arn.should.match(r"arn:aws:acm:eu-central-1:\d{12}:certificate/") resp = client.request_certificate( DomainName='google.com', From d669145b71f3b8bcb2f41caf62f8a0b4fc773e49 Mon Sep 17 00:00:00 2001 From: Asher Foa <1268088+asherf@users.noreply.github.com> Date: Mon, 19 Aug 2019 17:29:14 -0700 Subject: [PATCH 26/42] Filter certs by statuses. 
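
With this change list_certificates honours the CertificateStatuses filter instead of always returning every certificate. A rough usage sketch of the behaviour (not part of this patch; it assumes only the mock_acm decorator and the fact that a freshly requested certificate stays in PENDING_VALIDATION, which the updated tests below rely on):

    import boto3
    from moto import mock_acm

    @mock_acm
    def check_status_filter():
        client = boto3.client('acm', region_name='eu-central-1')
        # A requested certificate is created in PENDING_VALIDATION
        arn = client.request_certificate(DomainName='example.com')['CertificateArn']

        # A non-matching status filter yields no certificates
        assert not client.list_certificates(
            CertificateStatuses=['ISSUED'])['CertificateSummaryList']

        # The matching status returns the requested certificate
        summaries = client.list_certificates(
            CertificateStatuses=['PENDING_VALIDATION'])['CertificateSummaryList']
        assert summaries[0]['CertificateArn'] == arn
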
--- moto/acm/models.py | 6 ++++-- moto/acm/responses.py | 4 ++-- tests/test_acm/test_acm.py | 25 +++++++++++++++++++++++++ 3 files changed, 31 insertions(+), 4 deletions(-) diff --git a/moto/acm/models.py b/moto/acm/models.py index 15a1bd44d..f2ed9ae57 100644 --- a/moto/acm/models.py +++ b/moto/acm/models.py @@ -325,7 +325,7 @@ class AWSCertificateManagerBackend(BaseBackend): return bundle.arn - def get_certificates_list(self): + def get_certificates_list(self, statuses): """ Get list of certificates @@ -333,7 +333,9 @@ class AWSCertificateManagerBackend(BaseBackend): :rtype: list of CertBundle """ for arn in self._certificates.keys(): - yield self.get_certificate(arn) + cert = self.get_certificate(arn) + if not statuses or cert.status in statuses: + yield cert def get_certificate(self, arn): if arn not in self._certificates: diff --git a/moto/acm/responses.py b/moto/acm/responses.py index 38ebbaaa0..0d0ac640b 100644 --- a/moto/acm/responses.py +++ b/moto/acm/responses.py @@ -132,8 +132,8 @@ class AWSCertificateManagerResponse(BaseResponse): def list_certificates(self): certs = [] - - for cert_bundle in self.acm_backend.get_certificates_list(): + statuses = self._get_param('CertificateStatuses') + for cert_bundle in self.acm_backend.get_certificates_list(statuses): certs.append({ 'CertificateArn': cert_bundle.arn, 'DomainName': cert_bundle.common_name diff --git a/tests/test_acm/test_acm.py b/tests/test_acm/test_acm.py index ccac48181..32a25f5bc 100644 --- a/tests/test_acm/test_acm.py +++ b/tests/test_acm/test_acm.py @@ -74,6 +74,31 @@ def test_list_certificates(): resp['CertificateSummaryList'][0]['DomainName'].should.equal(SERVER_COMMON_NAME) +@mock_acm +def test_list_certificates_by_status(): + client = boto3.client('acm', region_name='eu-central-1') + issued_arn = _import_cert(client) + pending_arn = client.request_certificate(DomainName='google.com')['CertificateArn'] + + resp = client.list_certificates() + len(resp['CertificateSummaryList']).should.equal(2) + resp = client.list_certificates(CertificateStatuses=['EXPIRED', 'INACTIVE']) + len(resp['CertificateSummaryList']).should.equal(0) + resp = client.list_certificates(CertificateStatuses=['PENDING_VALIDATION']) + len(resp['CertificateSummaryList']).should.equal(1) + resp['CertificateSummaryList'][0]['CertificateArn'].should.equal(pending_arn) + + resp = client.list_certificates(CertificateStatuses=['ISSUED']) + len(resp['CertificateSummaryList']).should.equal(1) + resp['CertificateSummaryList'][0]['CertificateArn'].should.equal(issued_arn) + resp = client.list_certificates(CertificateStatuses=['ISSUED', 'PENDING_VALIDATION']) + len(resp['CertificateSummaryList']).should.equal(2) + arns = {cert['CertificateArn'] for cert in resp['CertificateSummaryList']} + arns.should.contain(issued_arn) + arns.should.contain(pending_arn) + + + @mock_acm def test_get_invalid_certificate(): client = boto3.client('acm', region_name='eu-central-1') From 1249ba8d3b1b833b2a672c47f52d3031e632edd2 Mon Sep 17 00:00:00 2001 From: Vury Leo Date: Tue, 20 Aug 2019 15:01:37 +0800 Subject: [PATCH 27/42] fix KeyCount in s3.list_objects_v2 --- moto/s3/responses.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index a05a86de4..77b87535b 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -463,10 +463,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): else: result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, 
max_keys) + key_count = len(result_keys) + len(result_folders) + return template.render( bucket=bucket, prefix=prefix or '', delimiter=delimiter, + key_count=key_count, result_keys=result_keys, result_folders=result_folders, fetch_owner=fetch_owner, @@ -1330,7 +1333,7 @@ S3_BUCKET_GET_RESPONSE_V2 = """ {{ bucket.name }} {{ prefix }} {{ max_keys }} - {{ result_keys | length }} + {{ key_count }} {% if delimiter %} {{ delimiter }} {% endif %} From 71241f1c3f80667c6740cf4b66850435c7f0041e Mon Sep 17 00:00:00 2001 From: Vury Leo Date: Tue, 20 Aug 2019 15:17:17 +0800 Subject: [PATCH 28/42] make linter happy --- moto/s3/responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moto/s3/responses.py b/moto/s3/responses.py index 77b87535b..ee047a14f 100644 --- a/moto/s3/responses.py +++ b/moto/s3/responses.py @@ -464,7 +464,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin): result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys) key_count = len(result_keys) + len(result_folders) - + return template.render( bucket=bucket, prefix=prefix or '', From ccceb70397bcec08ee52ed392839cd125cd26a28 Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Tue, 20 Aug 2019 21:54:57 -0400 Subject: [PATCH 29/42] And event source mapping endpoints and SQS trigger support --- moto/awslambda/models.py | 139 +++++++- moto/awslambda/responses.py | 64 ++++ moto/awslambda/urls.py | 2 + moto/sns/models.py | 2 +- moto/sqs/models.py | 29 ++ tests/test_awslambda/test_lambda.py | 314 +++++++++++++++++- .../test_cloudformation_stack_crud.py | 14 +- 7 files changed, 549 insertions(+), 15 deletions(-) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index 784d86b0b..0fcabbf03 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import base64 +import time from collections import defaultdict import copy import datetime @@ -31,6 +32,7 @@ from moto.logs.models import logs_backends from moto.s3.exceptions import MissingBucket, MissingKey from moto import settings from .utils import make_function_arn, make_function_ver_arn +from moto.sqs import sqs_backends logger = logging.getLogger(__name__) @@ -429,24 +431,39 @@ class LambdaFunction(BaseModel): class EventSourceMapping(BaseModel): def __init__(self, spec): # required - self.function_name = spec['FunctionName'] + self.function_arn = spec['FunctionArn'] self.event_source_arn = spec['EventSourceArn'] - self.starting_position = spec['StartingPosition'] - + self.uuid = str(uuid.uuid4()) + self.last_modified = time.mktime(datetime.datetime.utcnow().timetuple()) # optional - self.batch_size = spec.get('BatchSize', 100) + self.starting_position = spec.get('StartingPosition', 'TRIM_HORIZON') + self.batch_size = spec.get('BatchSize', 10) # TODO: Add source type-specific defaults self.enabled = spec.get('Enabled', True) self.starting_position_timestamp = spec.get('StartingPositionTimestamp', None) + def get_configuration(self): + return { + 'UUID': self.uuid, + 'BatchSize': self.batch_size, + 'EventSourceArn': self.event_source_arn, + 'FunctionArn': self.function_arn, + 'LastModified': self.last_modified, + 'LastProcessingResult': '', + 'State': 'Enabled' if self.enabled else 'Disabled', + 'StateTransitionReason': 'User initiated' + } + @classmethod def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): properties = cloudformation_json['Properties'] + func = 
lambda_backends[region_name].get_function(properties['FunctionName']) spec = { - 'FunctionName': properties['FunctionName'], + 'FunctionArn': func.function_arn, 'EventSourceArn': properties['EventSourceArn'], - 'StartingPosition': properties['StartingPosition'] + 'StartingPosition': properties['StartingPosition'], + 'BatchSize': properties.get('BatchSize', 100) } optional_properties = 'BatchSize Enabled StartingPositionTimestamp'.split() for prop in optional_properties: @@ -466,8 +483,10 @@ class LambdaVersion(BaseModel): def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name): properties = cloudformation_json['Properties'] + function_name = properties['FunctionName'] + func = lambda_backends[region_name].publish_function(function_name) spec = { - 'Version': properties.get('Version') + 'Version': func.version } return LambdaVersion(spec) @@ -515,6 +534,9 @@ class LambdaStorage(object): def get_arn(self, arn): return self._arns.get(arn, None) + def get_function_by_name_or_arn(self, input): + return self.get_function(input) or self.get_arn(input) + def put_function(self, fn): """ :param fn: Function @@ -596,6 +618,7 @@ class LambdaStorage(object): class LambdaBackend(BaseBackend): def __init__(self, region_name): self._lambdas = LambdaStorage() + self._event_source_mappings = {} self.region_name = region_name def reset(self): @@ -617,6 +640,43 @@ class LambdaBackend(BaseBackend): fn.version = ver.version return fn + def create_event_source_mapping(self, spec): + required = [ + 'EventSourceArn', + 'FunctionName', + ] + for param in required: + if not spec.get(param): + raise RESTError('InvalidParameterValueException', 'Missing {}'.format(param)) + + # Validate function name + func = self._lambdas.get_function_by_name_or_arn(spec.get('FunctionName', '')) + if not func: + raise RESTError('ResourceNotFoundException', 'Invalid FunctionName') + + # Validate queue + for queue in sqs_backends[self.region_name].queues.values(): + if queue.queue_arn == spec['EventSourceArn']: + if queue.lambda_event_source_mappings.get('func.function_arn'): + # TODO: Correct exception? 
+ raise RESTError('ResourceConflictException', 'The resource already exists.') + if queue.fifo_queue: + raise RESTError('InvalidParameterValueException', + '{} is FIFO'.format(queue.queue_arn)) + else: + esm_spec = { + 'EventSourceArn': spec['EventSourceArn'], + 'FunctionArn': func.function_arn, + } + esm = EventSourceMapping(esm_spec) + self._event_source_mappings[esm.uuid] = esm + + # Set backend function on queue + queue.lambda_event_source_mappings[esm.function_arn] = esm + + return esm + raise RESTError('ResourceNotFoundException', 'Invalid EventSourceArn') + def publish_function(self, function_name): return self._lambdas.publish_function(function_name) @@ -626,6 +686,33 @@ class LambdaBackend(BaseBackend): def list_versions_by_function(self, function_name): return self._lambdas.list_versions_by_function(function_name) + def get_event_source_mapping(self, uuid): + return self._event_source_mappings.get(uuid) + + def delete_event_source_mapping(self, uuid): + return self._event_source_mappings.pop(uuid) + + def update_event_source_mapping(self, uuid, spec): + esm = self.get_event_source_mapping(uuid) + if esm: + if spec.get('FunctionName'): + func = self._lambdas.get_function_by_name_or_arn(spec.get('FunctionName')) + esm.function_arn = func.function_arn + if 'BatchSize' in spec: + esm.batch_size = spec['BatchSize'] + if 'Enabled' in spec: + esm.enabled = spec['Enabled'] + return esm + return False + + def list_event_source_mappings(self, event_source_arn, function_name): + esms = list(self._event_source_mappings.values()) + if event_source_arn: + esms = list(filter(lambda x: x.event_source_arn == event_source_arn, esms)) + if function_name: + esms = list(filter(lambda x: x.function_name == function_name, esms)) + return esms + def get_function_by_arn(self, function_arn): return self._lambdas.get_arn(function_arn) @@ -635,7 +722,43 @@ class LambdaBackend(BaseBackend): def list_functions(self): return self._lambdas.all() - def send_message(self, function_name, message, subject=None, qualifier=None): + def send_sqs_batch(self, function_arn, messages, queue_arn): + success = True + for message in messages: + func = self.get_function_by_arn(function_arn) + result = self._send_sqs_message(func, message, queue_arn) + if not result: + success = False + return success + + def _send_sqs_message(self, func, message, queue_arn): + event = { + "Records": [ + { + "messageId": message.id, + "receiptHandle": message.receipt_handle, + "body": message.body, + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "098f6bcd4621d373cade4e832627b4f6", + "eventSource": "aws:sqs", + "eventSourceARN": queue_arn, + "awsRegion": self.region_name + } + ] + } + + request_headers = {} + response_headers = {} + func.invoke(json.dumps(event), request_headers, response_headers) + return 'x-amz-function-error' not in response_headers + + def send_sns_message(self, function_name, message, subject=None, qualifier=None): event = { "Records": [ { diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py index c29c9acd9..1e7feb0d0 100644 --- a/moto/awslambda/responses.py +++ b/moto/awslambda/responses.py @@ -39,6 +39,31 @@ class LambdaResponse(BaseResponse): else: raise ValueError("Cannot handle request") + def event_source_mappings(self, request, full_url, headers): + self.setup_class(request, full_url, headers) + if request.method == 
'GET': + querystring = self.querystring + event_source_arn = querystring.get('EventSourceArn', [None])[0] + function_name = querystring.get('FunctionName', [None])[0] + return self._list_event_source_mappings(event_source_arn, function_name) + elif request.method == 'POST': + return self._create_event_source_mapping(request, full_url, headers) + else: + raise ValueError("Cannot handle request") + + def event_source_mapping(self, request, full_url, headers): + self.setup_class(request, full_url, headers) + path = request.path if hasattr(request, 'path') else path_url(request.url) + uuid = path.split('/')[-1] + if request.method == 'GET': + return self._get_event_source_mapping(uuid) + elif request.method == 'PUT': + return self._update_event_source_mapping(uuid) + elif request.method == 'DELETE': + return self._delete_event_source_mapping(uuid) + else: + raise ValueError("Cannot handle request") + def function(self, request, full_url, headers): self.setup_class(request, full_url, headers) if request.method == 'GET': @@ -177,6 +202,45 @@ class LambdaResponse(BaseResponse): config = fn.get_configuration() return 201, {}, json.dumps(config) + def _create_event_source_mapping(self, request, full_url, headers): + try: + fn = self.lambda_backend.create_event_source_mapping(self.json_body) + except ValueError as e: + return 400, {}, json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}}) + else: + config = fn.get_configuration() + return 201, {}, json.dumps(config) + + def _list_event_source_mappings(self, event_source_arn, function_name): + esms = self.lambda_backend.list_event_source_mappings(event_source_arn, function_name) + result = { + 'EventSourceMappings': [esm.get_configuration() for esm in esms] + } + return 200, {}, json.dumps(result) + + def _get_event_source_mapping(self, uuid): + result = self.lambda_backend.get_event_source_mapping(uuid) + if result: + return 200, {}, json.dumps(result.get_configuration()) + else: + return 404, {}, "{}" + + def _update_event_source_mapping(self, uuid): + result = self.lambda_backend.update_event_source_mapping(uuid, self.json_body) + if result: + return 202, {}, json.dumps(result.get_configuration()) + else: + return 404, {}, "{}" + + def _delete_event_source_mapping(self, uuid): + esm = self.lambda_backend.delete_event_source_mapping(uuid) + if esm: + json_result = esm.get_configuration() + json_result.update({'State': 'Deleting'}) + return 202, {}, json.dumps(json_result) + else: + return 404, {}, "{}" + def _publish_function(self, request, full_url, headers): function_name = self.path.rsplit('/', 2)[-2] diff --git a/moto/awslambda/urls.py b/moto/awslambda/urls.py index 7c4d064dc..fb2c6ee7e 100644 --- a/moto/awslambda/urls.py +++ b/moto/awslambda/urls.py @@ -11,6 +11,8 @@ url_paths = { '{0}/(?P[^/]+)/functions/?$': response.root, r'{0}/(?P[^/]+)/functions/(?P[\w_-]+)/?$': response.function, r'{0}/(?P[^/]+)/functions/(?P[\w_-]+)/versions/?$': response.versions, + r'{0}/(?P[^/]+)/event-source-mappings/?$': response.event_source_mappings, + r'{0}/(?P[^/]+)/event-source-mappings/(?P[\w_-]+)/?$': response.event_source_mapping, r'{0}/(?P[^/]+)/functions/(?P[\w_-]+)/invocations/?$': response.invoke, r'{0}/(?P[^/]+)/functions/(?P[\w_-]+)/invoke-async/?$': response.invoke_async, r'{0}/(?P[^/]+)/tags/(?P.+)': response.tag, diff --git a/moto/sns/models.py b/moto/sns/models.py index 18b86cb93..f1293eb0f 100644 --- a/moto/sns/models.py +++ b/moto/sns/models.py @@ -119,7 +119,7 @@ class Subscription(BaseModel): else: assert False - 
lambda_backends[region].send_message(function_name, message, subject=subject, qualifier=qualifier) + lambda_backends[region].send_sns_message(function_name, message, subject=subject, qualifier=qualifier) def _matches_filter_policy(self, message_attributes): # TODO: support Anything-but matching, prefix matching and diff --git a/moto/sqs/models.py b/moto/sqs/models.py index f2e3ed400..e774e261c 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -189,6 +189,8 @@ class Queue(BaseModel): self.name) self.dead_letter_queue = None + self.lambda_event_source_mappings = {} + # default settings for a non fifo queue defaults = { 'ContentBasedDeduplication': 'false', @@ -360,6 +362,33 @@ class Queue(BaseModel): def add_message(self, message): self._messages.append(message) + from moto.awslambda import lambda_backends + for arn, esm in self.lambda_event_source_mappings.items(): + backend = sqs_backends[self.region] + + """ + Lambda polls the queue and invokes your function synchronously with an event + that contains queue messages. Lambda reads messages in batches and invokes + your function once for each batch. When your function successfully processes + a batch, Lambda deletes its messages from the queue. + """ + messages = backend.receive_messages( + self.name, + esm.batch_size, + self.receive_message_wait_time_seconds, + self.visibility_timeout, + ) + + result = lambda_backends[self.region].send_sqs_batch( + arn, + messages, + self.queue_arn, + ) + + if result: + [backend.delete_message(self.name, m.receipt_handle) for m in messages] + else: + [backend.change_message_visibility(self.name, m.receipt_handle, 0) for m in messages] def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py index 9ef6fdb0d..9467b0803 100644 --- a/tests/test_awslambda/test_lambda.py +++ b/tests/test_awslambda/test_lambda.py @@ -1,6 +1,7 @@ from __future__ import unicode_literals import base64 +import uuid import botocore.client import boto3 import hashlib @@ -11,11 +12,12 @@ import zipfile import sure # noqa from freezegun import freeze_time -from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings +from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings, mock_sqs from nose.tools import assert_raises from botocore.exceptions import ClientError _lambda_region = 'us-west-2' +boto3.setup_default_session(region_name=_lambda_region) def _process_lambda(func_str): @@ -59,6 +61,13 @@ def lambda_handler(event, context): """ return _process_lambda(pfunc) +def get_test_zip_file4(): + pfunc = """ +def lambda_handler(event, context): + raise Exception('I failed!') +""" + return _process_lambda(pfunc) + @mock_lambda def test_list_functions(): @@ -933,3 +942,306 @@ def test_list_versions_by_function_for_nonexistent_function(): versions = conn.list_versions_by_function(FunctionName='testFunction') assert len(versions['Versions']) == 0 + + +@mock_logs +@mock_lambda +@mock_sqs +def test_create_event_source_mapping(): + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = 
conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func['FunctionArn'], + ) + + assert response['EventSourceArn'] == queue.attributes['QueueArn'] + assert response['FunctionArn'] == func['FunctionArn'] + assert response['State'] == 'Enabled' + + +@mock_logs +@mock_lambda +@mock_sqs +def test_invoke_function_from_sqs(): + logs_conn = boto3.client("logs") + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func['FunctionArn'], + ) + + assert response['EventSourceArn'] == queue.attributes['QueueArn'] + assert response['State'] == 'Enabled' + + sqs_client = boto3.client('sqs') + sqs_client.send_message(QueueUrl=queue.url, MessageBody='test') + start = time.time() + while (time.time() - start) < 30: + result = logs_conn.describe_log_streams(logGroupName='/aws/lambda/testFunction') + log_streams = result.get('logStreams') + if not log_streams: + time.sleep(1) + continue + + assert len(log_streams) == 1 + result = logs_conn.get_log_events(logGroupName='/aws/lambda/testFunction', logStreamName=log_streams[0]['logStreamName']) + for event in result.get('events'): + if event['message'] == 'get_test_zip_file3 success': + return + time.sleep(1) + + assert False, "Test Failed" + + +@mock_logs +@mock_lambda +@mock_sqs +def test_invoke_function_from_sqs_exception(): + logs_conn = boto3.client("logs") + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file4(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func['FunctionArn'], + ) + + assert response['EventSourceArn'] == queue.attributes['QueueArn'] + assert response['State'] == 'Enabled' + + entries = [] + for i in range(3): + body = { + "uuid": str(uuid.uuid4()), + "test": "test_{}".format(i), + } + entry = { + 'Id': str(i), + 'MessageBody': json.dumps(body) + } + entries.append(entry) + + queue.send_messages(Entries=entries) + + start = time.time() + while (time.time() - start) < 30: + result = logs_conn.describe_log_streams(logGroupName='/aws/lambda/testFunction') + log_streams = result.get('logStreams') + if not log_streams: + time.sleep(1) + continue + assert len(log_streams) >= 1 + + result = logs_conn.get_log_events(logGroupName='/aws/lambda/testFunction', logStreamName=log_streams[0]['logStreamName']) + for event in result.get('events'): + if 'I failed!' 
in event['message']: + messages = queue.receive_messages(MaxNumberOfMessages=10) + # Verify messages are still visible and unprocessed + assert len(messages) is 3 + return + time.sleep(1) + + assert False, "Test Failed" + + +@mock_logs +@mock_lambda +@mock_sqs +def test_list_event_source_mappings(): + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func['FunctionArn'], + ) + mappings = conn.list_event_source_mappings(EventSourceArn='123') + assert len(mappings['EventSourceMappings']) == 0 + + mappings = conn.list_event_source_mappings(EventSourceArn=queue.attributes['QueueArn']) + assert len(mappings['EventSourceMappings']) == 1 + assert mappings['EventSourceMappings'][0]['UUID'] == response['UUID'] + assert mappings['EventSourceMappings'][0]['FunctionArn'] == func['FunctionArn'] + + +@mock_lambda +@mock_sqs +def test_get_event_source_mapping(): + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func['FunctionArn'], + ) + mapping = conn.get_event_source_mapping(UUID=response['UUID']) + assert mapping['UUID'] == response['UUID'] + assert mapping['FunctionArn'] == func['FunctionArn'] + + conn.get_event_source_mapping.when.called_with(UUID='1')\ + .should.throw(botocore.client.ClientError) + + +@mock_lambda +@mock_sqs +def test_update_event_source_mapping(): + sqs = boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func1 = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + func2 = conn.create_function( + FunctionName='testFunction2', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func1['FunctionArn'], + ) + assert response['FunctionArn'] == func1['FunctionArn'] + assert response['BatchSize'] == 10 + assert response['State'] == 'Enabled' + + mapping = conn.update_event_source_mapping( + UUID=response['UUID'], + Enabled=False, + BatchSize=15, + FunctionName='testFunction2' + + ) + assert mapping['UUID'] == response['UUID'] + assert mapping['FunctionArn'] == func2['FunctionArn'] + assert mapping['State'] == 'Disabled' + + +@mock_lambda +@mock_sqs +def test_delete_event_source_mapping(): + sqs = 
boto3.resource('sqs') + queue = sqs.create_queue(QueueName="test-sqs-queue1") + + conn = boto3.client('lambda') + func1 = conn.create_function( + FunctionName='testFunction', + Runtime='python2.7', + Role='test-iam-role', + Handler='lambda_function.lambda_handler', + Code={ + 'ZipFile': get_test_zip_file3(), + }, + Description='test lambda function', + Timeout=3, + MemorySize=128, + Publish=True, + ) + response = conn.create_event_source_mapping( + EventSourceArn=queue.attributes['QueueArn'], + FunctionName=func1['FunctionArn'], + ) + assert response['FunctionArn'] == func1['FunctionArn'] + assert response['BatchSize'] == 10 + assert response['State'] == 'Enabled' + + response = conn.delete_event_source_mapping(UUID=response['UUID']) + + assert response['State'] == 'Deleting' + conn.get_event_source_mapping.when.called_with(UUID=response['UUID'])\ + .should.throw(botocore.client.ClientError) diff --git a/tests/test_cloudformation/test_cloudformation_stack_crud.py b/tests/test_cloudformation/test_cloudformation_stack_crud.py index b7906632b..27424bf8c 100644 --- a/tests/test_cloudformation/test_cloudformation_stack_crud.py +++ b/tests/test_cloudformation/test_cloudformation_stack_crud.py @@ -593,9 +593,11 @@ def test_create_stack_lambda_and_dynamodb(): } }, "func1version": { - "Type": "AWS::Lambda::LambdaVersion", - "Properties" : { - "Version": "v1.2.3" + "Type": "AWS::Lambda::Version", + "Properties": { + "FunctionName": { + "Ref": "func1" + } } }, "tab1": { @@ -618,8 +620,10 @@ def test_create_stack_lambda_and_dynamodb(): }, "func1mapping": { "Type": "AWS::Lambda::EventSourceMapping", - "Properties" : { - "FunctionName": "v1.2.3", + "Properties": { + "FunctionName": { + "Ref": "func1" + }, "EventSourceArn": "arn:aws:dynamodb:region:XXXXXX:table/tab1/stream/2000T00:00:00.000", "StartingPosition": "0", "BatchSize": 100, From d9cb1f2d353ccc8d0ef77f5a98fc2d8f4ca82252 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Wed, 21 Aug 2019 10:45:36 +0200 Subject: [PATCH 30/42] Implemented returning random assumed role ID. --- moto/sts/models.py | 3 ++- moto/sts/responses.py | 2 +- moto/sts/utils.py | 20 +++++++++++++++----- tests/test_sts/test_sts.py | 30 +++++++++++++++++------------- 4 files changed, 35 insertions(+), 20 deletions(-) diff --git a/moto/sts/models.py b/moto/sts/models.py index e437575bf..ff9de84b0 100644 --- a/moto/sts/models.py +++ b/moto/sts/models.py @@ -2,7 +2,7 @@ from __future__ import unicode_literals import datetime from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds -from moto.sts.utils import random_access_key_id, random_secret_access_key, random_session_token +from moto.sts.utils import random_access_key_id, random_secret_access_key, random_session_token, random_assumed_role_id class Token(BaseModel): @@ -30,6 +30,7 @@ class AssumedRole(BaseModel): self.access_key_id = "ASIA" + random_access_key_id() self.secret_access_key = random_secret_access_key() self.session_token = random_session_token() + self.assumed_role_id = "AROA" + random_assumed_role_id() @property def expiration_ISO8601(self): diff --git a/moto/sts/responses.py b/moto/sts/responses.py index 24ec181ba..c04d1636e 100644 --- a/moto/sts/responses.py +++ b/moto/sts/responses.py @@ -91,7 +91,7 @@ ASSUME_ROLE_RESPONSE = """ Date: Wed, 21 Aug 2019 12:20:35 +0200 Subject: [PATCH 31/42] Implemented get-caller-identity returning real data depending on the access key used. 
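A minimal usage sketch (not part of the patch) of the behaviour this change enables: GetCallerIdentity now reflects the credentials used to sign the request. The user name and function name below are illustrative assumptions; only the moto decorators and the boto3 calls are taken from the patch's own tests.

    import boto3
    from moto import mock_iam, mock_sts

    @mock_iam
    @mock_sts
    def caller_identity_reflects_credentials():
        iam = boto3.client("iam", region_name="us-east-1")
        user = iam.create_user(UserName="demo-user")["User"]
        key = iam.create_access_key(UserName=user["UserName"])["AccessKey"]

        sts = boto3.client(
            "sts",
            region_name="us-east-1",
            aws_access_key_id=key["AccessKeyId"],
            aws_secret_access_key=key["SecretAccessKey"],
        )
        # With this patch, Arn/UserId match the IAM user owning the access key
        # (or the assumed role, if the key came from assume_role) instead of
        # the previous hard-coded defaults.
        print(sts.get_caller_identity()["Arn"])
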
--- moto/sts/models.py | 12 ++++++- moto/sts/responses.py | 34 ++++++++++++++----- tests/test_sts/test_sts.py | 67 +++++++++++++++++++++++++++++++++----- 3 files changed, 96 insertions(+), 17 deletions(-) diff --git a/moto/sts/models.py b/moto/sts/models.py index ff9de84b0..295260067 100644 --- a/moto/sts/models.py +++ b/moto/sts/models.py @@ -22,7 +22,7 @@ class AssumedRole(BaseModel): def __init__(self, role_session_name, role_arn, policy, duration, external_id): self.session_name = role_session_name - self.arn = role_arn + self.arn = role_arn + "/" + role_session_name self.policy = policy now = datetime.datetime.utcnow() self.expiration = now + datetime.timedelta(seconds=duration) @@ -36,6 +36,10 @@ class AssumedRole(BaseModel): def expiration_ISO8601(self): return iso_8601_datetime_with_milliseconds(self.expiration) + @property + def user_id(self): + return self.assumed_role_id + ":" + self.session_name + class STSBackend(BaseBackend): @@ -55,5 +59,11 @@ class STSBackend(BaseBackend): self.assumed_roles.append(role) return role + def get_assumed_role_from_access_key(self, access_key_id): + for assumed_role in self.assumed_roles: + if assumed_role.access_key_id == access_key_id: + return assumed_role + return None + sts_backend = STSBackend() diff --git a/moto/sts/responses.py b/moto/sts/responses.py index c04d1636e..2dbe0dc1c 100644 --- a/moto/sts/responses.py +++ b/moto/sts/responses.py @@ -1,6 +1,8 @@ from __future__ import unicode_literals from moto.core.responses import BaseResponse +from moto.iam.models import ACCOUNT_ID +from moto.iam import iam_backend from .models import sts_backend @@ -19,7 +21,7 @@ class TokenResponse(BaseResponse): token = sts_backend.get_federation_token( duration=duration, name=name, policy=policy) template = self.response_template(GET_FEDERATION_TOKEN_RESPONSE) - return template.render(token=token) + return template.render(token=token, account_id=ACCOUNT_ID) def assume_role(self): role_session_name = self.querystring.get('RoleSessionName')[0] @@ -41,7 +43,23 @@ class TokenResponse(BaseResponse): def get_caller_identity(self): template = self.response_template(GET_CALLER_IDENTITY_RESPONSE) - return template.render() + + # Default values in case the request does not use valid credentials generated by moto + user_id = "AKIAIOSFODNN7EXAMPLE" + arn = "arn:aws:sts::{account_id}:user/moto".format(account_id=ACCOUNT_ID) + + access_key_id = self.get_current_user() + assumed_role = sts_backend.get_assumed_role_from_access_key(access_key_id) + if assumed_role: + user_id = assumed_role.user_id + arn = assumed_role.arn + + user = iam_backend.get_user_from_access_key_id(access_key_id) + if user: + user_id = user.id + arn = user.arn + + return template.render(account_id=ACCOUNT_ID, user_id=user_id, arn=arn) GET_SESSION_TOKEN_RESPONSE = """ @@ -69,8 +87,8 @@ GET_FEDERATION_TOKEN_RESPONSE = """ - arn:aws:sts::123456789012:user/moto - AKIAIOSFODNN7EXAMPLE - 123456789012 + {{ arn }} + {{ user_id }} + {{ account_id }} c6104cbe-af31-11e0-8154-cbc7ccf896c7 diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index 3cc44b992..49fc1f2bf 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -6,7 +6,8 @@ import boto3 from freezegun import freeze_time import sure # noqa -from moto import mock_sts, mock_sts_deprecated +from moto import mock_sts, mock_sts_deprecated, mock_iam +from moto.iam.models import ACCOUNT_ID @freeze_time("2012-01-01 12:00:00") @@ -26,7 +27,8 @@ def test_get_session_token(): @mock_sts_deprecated def 
test_get_federation_token(): conn = boto.connect_sts() - token = conn.get_federation_token(duration=123, name="Bob") + token_name = "Bob" + token = conn.get_federation_token(duration=123, name=token_name) token.credentials.expiration.should.equal('2012-01-01T12:02:03.000Z') token.credentials.session_token.should.equal( @@ -35,8 +37,8 @@ def test_get_federation_token(): token.credentials.secret_key.should.equal( "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY") token.federated_user_arn.should.equal( - "arn:aws:sts::123456789012:federated-user/Bob") - token.federated_user_id.should.equal("123456789012:Bob") + "arn:aws:sts::{account_id}:federated-user/{token_name}".format(account_id=ACCOUNT_ID, token_name=token_name)) + token.federated_user_id.should.equal(str(ACCOUNT_ID) + ":" + token_name) @freeze_time("2012-01-01 12:00:00") @@ -72,17 +74,66 @@ def test_assume_role(): assert credentials['AccessKeyId'].startswith("ASIA") credentials['SecretAccessKey'].should.have.length_of(40) - assume_role_response['AssumedRoleUser']['Arn'].should.equal("arn:aws:iam::123456789012:role/test-role") + assume_role_response['AssumedRoleUser']['Arn'].should.equal("arn:aws:iam::123456789012:role/test-role/" + session_name) assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].startswith("AROA") assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].endswith(":" + session_name) assume_role_response['AssumedRoleUser']['AssumedRoleId'].should.have.length_of(21 + 1 + len(session_name)) @mock_sts -def test_get_caller_identity(): +def test_get_caller_identity_with_default_credentials(): identity = boto3.client( "sts", region_name='us-east-1').get_caller_identity() - identity['Arn'].should.equal('arn:aws:sts::123456789012:user/moto') + identity['Arn'].should.equal('arn:aws:sts::{account_id}:user/moto'.format(account_id=ACCOUNT_ID)) identity['UserId'].should.equal('AKIAIOSFODNN7EXAMPLE') - identity['Account'].should.equal('123456789012') + identity['Account'].should.equal(str(ACCOUNT_ID)) + + +@mock_sts +@mock_iam +def test_get_caller_identity_with_iam_user_credentials(): + iam_client = boto3.client("iam", region_name='us-east-1') + iam_user_name = "new-user" + iam_user = iam_client.create_user(UserName=iam_user_name)['User'] + access_key = iam_client.create_access_key(UserName=iam_user_name)['AccessKey'] + + identity = boto3.client( + "sts", region_name='us-east-1', aws_access_key_id=access_key['AccessKeyId'], + aws_secret_access_key=access_key['SecretAccessKey']).get_caller_identity() + + identity['Arn'].should.equal(iam_user['Arn']) + identity['UserId'].should.equal(iam_user['UserId']) + identity['Account'].should.equal(str(ACCOUNT_ID)) + + +@mock_sts +@mock_iam +def test_get_caller_identity_with_assumed_role_credentials(): + iam_client = boto3.client("iam", region_name='us-east-1') + sts_client = boto3.client("sts", region_name='us-east-1') + iam_role_name = "new-user" + trust_policy_document = { + "Version": "2012-10-17", + "Statement": { + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::{account_id}:root".format(account_id=ACCOUNT_ID)}, + "Action": "sts:AssumeRole" + } + } + iam_role_arn = iam_client.role_arn = iam_client.create_role( + RoleName=iam_role_name, + AssumeRolePolicyDocument=json.dumps(trust_policy_document) + )['Role']['Arn'] + session_name = "new-session" + assumed_role = sts_client.assume_role(RoleArn=iam_role_arn, + RoleSessionName=session_name) + access_key = assumed_role['Credentials'] + + identity = boto3.client( + "sts", region_name='us-east-1', 
aws_access_key_id=access_key['AccessKeyId'], + aws_secret_access_key=access_key['SecretAccessKey']).get_caller_identity() + + identity['Arn'].should.equal(assumed_role['AssumedRoleUser']['Arn']) + identity['UserId'].should.equal(assumed_role['AssumedRoleUser']['AssumedRoleId']) + identity['Account'].should.equal(str(ACCOUNT_ID)) From 27fdbb7736961cf83522b902fc256cd1ecffc06c Mon Sep 17 00:00:00 2001 From: acsbendi Date: Wed, 21 Aug 2019 12:57:45 +0200 Subject: [PATCH 32/42] Derive ARN of AssumedRoles from its role ARN and session name. --- moto/core/access_control.py | 2 +- moto/sts/models.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/moto/core/access_control.py b/moto/core/access_control.py index 800b7550f..c64acf20c 100644 --- a/moto/core/access_control.py +++ b/moto/core/access_control.py @@ -106,7 +106,7 @@ class AssumedRoleAccessKey(object): self._access_key_id = access_key_id self._secret_access_key = assumed_role.secret_access_key self._session_token = assumed_role.session_token - self._owner_role_name = assumed_role.arn.split("/")[-1] + self._owner_role_name = assumed_role.role_arn.split("/")[-1] self._session_name = assumed_role.session_name if headers["X-Amz-Security-Token"] != self._session_token: raise CreateAccessKeyFailure(reason="InvalidToken") diff --git a/moto/sts/models.py b/moto/sts/models.py index 8ff6d9838..a471b5278 100644 --- a/moto/sts/models.py +++ b/moto/sts/models.py @@ -22,7 +22,7 @@ class AssumedRole(BaseModel): def __init__(self, role_session_name, role_arn, policy, duration, external_id): self.session_name = role_session_name - self.arn = role_arn + "/" + role_session_name + self.role_arn = role_arn self.policy = policy now = datetime.datetime.utcnow() self.expiration = now + datetime.timedelta(seconds=duration) @@ -40,6 +40,10 @@ class AssumedRole(BaseModel): def user_id(self): return self.assumed_role_id + ":" + self.session_name + @property + def arn(self): + return self.role_arn + "/" + self.session_name + class STSBackend(BaseBackend): From 6bdbd0dbc87108c968dcfe1066a87564a93b7984 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Wed, 21 Aug 2019 13:17:58 +0200 Subject: [PATCH 33/42] Fixed a broken test case and parameterized account ID in STS tests. 
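A short sketch of the parameterisation pattern the diff below applies; the variable names are assumptions for illustration, only ACCOUNT_ID comes from moto itself.

    from moto.iam.models import ACCOUNT_ID

    role_name = "test-role"
    s3_role = "arn:aws:iam::{account_id}:role/{role_name}".format(
        account_id=ACCOUNT_ID, role_name=role_name
    )
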
--- tests/test_sts/test_sts.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index f61fa3e08..efbe83148 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -66,7 +66,7 @@ def test_assume_role(): }, ] }) - s3_role = "arn:aws:iam::123456789012:role/test-role" + s3_role = "arn:aws:iam::{account_id}:role/test-role".format(account_id=ACCOUNT_ID) assume_role_response = client.assume_role(RoleArn=s3_role, RoleSessionName=session_name, Policy=policy, DurationSeconds=900) @@ -78,7 +78,7 @@ def test_assume_role(): assert credentials['AccessKeyId'].startswith("ASIA") credentials['SecretAccessKey'].should.have.length_of(40) - assume_role_response['AssumedRoleUser']['Arn'].should.equal("arn:aws:iam::123456789012:role/test-role/" + session_name) + assume_role_response['AssumedRoleUser']['Arn'].should.equal(s3_role + "/" + session_name) assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].startswith("AROA") assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].endswith(":" + session_name) assume_role_response['AssumedRoleUser']['AssumedRoleId'].should.have.length_of(21 + 1 + len(session_name)) @@ -103,9 +103,10 @@ def test_assume_role_with_web_identity(): }, ] }) - s3_role = "arn:aws:iam::123456789012:role/test-role" + s3_role = "arn:aws:iam::{account_id}:role/test-role".format(account_id=ACCOUNT_ID) + session_name = "session-name" role = conn.assume_role_with_web_identity( - s3_role, "session-name", policy, duration_seconds=123) + s3_role, session_name, policy, duration_seconds=123) credentials = role.credentials credentials.expiration.should.equal('2012-01-01T12:02:03.000Z') @@ -115,7 +116,7 @@ def test_assume_role_with_web_identity(): assert credentials.access_key.startswith("ASIA") credentials.secret_key.should.have.length_of(40) - role.user.arn.should.equal("arn:aws:iam::123456789012:role/test-role") + role.user.arn.should.equal(s3_role + "/" + session_name) role.user.assume_role_id.should.contain("session-name") From 3012740699f794454d546bce71bbb9306342a58e Mon Sep 17 00:00:00 2001 From: acsbendi Date: Wed, 21 Aug 2019 19:47:12 +0200 Subject: [PATCH 34/42] Fixed AssumedRole ARN. 
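A sketch of the ARN shape produced by the new `arn` property introduced below; the role and session names are example values, ACCOUNT_ID comes from moto.

    from moto.iam.models import ACCOUNT_ID

    role_arn = "arn:aws:iam::{}:role/test-role".format(ACCOUNT_ID)
    session_name = "session-name"
    assumed_role_arn = "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format(
        account_id=ACCOUNT_ID,
        role_name=role_arn.split("/")[-1],
        session_name=session_name,
    )
    # -> arn:aws:sts::123456789012:assumed-role/test-role/session-name
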
--- moto/sts/models.py | 7 ++++++- tests/test_sts/test_sts.py | 12 ++++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/moto/sts/models.py b/moto/sts/models.py index a471b5278..c2ff7a8d3 100644 --- a/moto/sts/models.py +++ b/moto/sts/models.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import datetime from moto.core import BaseBackend, BaseModel from moto.core.utils import iso_8601_datetime_with_milliseconds +from moto.iam.models import ACCOUNT_ID from moto.sts.utils import random_access_key_id, random_secret_access_key, random_session_token, random_assumed_role_id @@ -42,7 +43,11 @@ class AssumedRole(BaseModel): @property def arn(self): - return self.role_arn + "/" + self.session_name + return "arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format( + account_id=ACCOUNT_ID, + role_name=self.role_arn.split("/")[-1], + session_name=self.session_name + ) class STSBackend(BaseBackend): diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index efbe83148..ac7c4ea11 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -66,7 +66,8 @@ def test_assume_role(): }, ] }) - s3_role = "arn:aws:iam::{account_id}:role/test-role".format(account_id=ACCOUNT_ID) + role_name = "test-role" + s3_role = "arn:aws:iam::{account_id}:role/{role_name}".format(account_id=ACCOUNT_ID, role_name=role_name) assume_role_response = client.assume_role(RoleArn=s3_role, RoleSessionName=session_name, Policy=policy, DurationSeconds=900) @@ -78,7 +79,8 @@ def test_assume_role(): assert credentials['AccessKeyId'].startswith("ASIA") credentials['SecretAccessKey'].should.have.length_of(40) - assume_role_response['AssumedRoleUser']['Arn'].should.equal(s3_role + "/" + session_name) + assume_role_response['AssumedRoleUser']['Arn'].should.equal("arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format( + account_id=ACCOUNT_ID, role_name=role_name, session_name=session_name)) assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].startswith("AROA") assert assume_role_response['AssumedRoleUser']['AssumedRoleId'].endswith(":" + session_name) assume_role_response['AssumedRoleUser']['AssumedRoleId'].should.have.length_of(21 + 1 + len(session_name)) @@ -103,7 +105,8 @@ def test_assume_role_with_web_identity(): }, ] }) - s3_role = "arn:aws:iam::{account_id}:role/test-role".format(account_id=ACCOUNT_ID) + role_name = "test-role" + s3_role = "arn:aws:iam::{account_id}:role/{role_name}".format(account_id=ACCOUNT_ID, role_name=role_name) session_name = "session-name" role = conn.assume_role_with_web_identity( s3_role, session_name, policy, duration_seconds=123) @@ -116,7 +119,8 @@ def test_assume_role_with_web_identity(): assert credentials.access_key.startswith("ASIA") credentials.secret_key.should.have.length_of(40) - role.user.arn.should.equal(s3_role + "/" + session_name) + role.user.arn.should.equal("arn:aws:sts::{account_id}:assumed-role/{role_name}/{session_name}".format( + account_id=ACCOUNT_ID, role_name=role_name, session_name=session_name)) role.user.assume_role_id.should.contain("session-name") From 38866bfcef28525a48035e023931aa1fbe1b35c6 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Wed, 21 Aug 2019 12:24:23 -0700 Subject: [PATCH 35/42] Fixed some IAM APIs for tagging and role descriptions --- moto/iam/models.py | 50 ++++++++++++---------- moto/iam/responses.py | 24 ++++++++++- tests/test_iam/test_iam.py | 86 +++++++++++++++++++++++++++++++++++++- 3 files changed, 136 insertions(+), 24 deletions(-) diff --git 
a/moto/iam/models.py b/moto/iam/models.py index 21bb87e02..d76df8a28 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -161,7 +161,7 @@ class InlinePolicy(Policy): class Role(BaseModel): - def __init__(self, role_id, name, assume_role_policy_document, path, permissions_boundary): + def __init__(self, role_id, name, assume_role_policy_document, path, permissions_boundary, description, tags): self.id = role_id self.name = name self.assume_role_policy_document = assume_role_policy_document @@ -169,8 +169,8 @@ class Role(BaseModel): self.policies = {} self.managed_policies = {} self.create_date = datetime.utcnow() - self.tags = {} - self.description = "" + self.tags = tags + self.description = description self.permissions_boundary = permissions_boundary @property @@ -185,7 +185,9 @@ class Role(BaseModel): role_name=resource_name, assume_role_policy_document=properties['AssumeRolePolicyDocument'], path=properties.get('Path', '/'), - permissions_boundary=properties.get('PermissionsBoundary', '') + permissions_boundary=properties.get('PermissionsBoundary', ''), + description=properties.get('Description', ''), + tags=properties.get('Tags', {}) ) policies = properties.get('Policies', []) @@ -635,12 +637,13 @@ class IAMBackend(BaseBackend): return policies, marker - def create_role(self, role_name, assume_role_policy_document, path, permissions_boundary): + def create_role(self, role_name, assume_role_policy_document, path, permissions_boundary, description, tags): role_id = random_resource_id() if permissions_boundary and not self.policy_arn_regex.match(permissions_boundary): raise RESTError('InvalidParameterValue', 'Value ({}) for parameter PermissionsBoundary is invalid.'.format(permissions_boundary)) - role = Role(role_id, role_name, assume_role_policy_document, path, permissions_boundary) + clean_tags = self._tag_verification(tags) + role = Role(role_id, role_name, assume_role_policy_document, path, permissions_boundary, description, clean_tags) self.roles[role_id] = role return role @@ -691,6 +694,23 @@ class IAMBackend(BaseBackend): role = self.get_role(role_name) return role.policies.keys() + def _tag_verification(self, tags): + if len(tags) > 50: + raise TooManyTags(tags) + + tag_keys = {} + for tag in tags: + # Need to index by the lowercase tag key since the keys are case insensitive, but their case is retained. + ref_key = tag['Key'].lower() + self._check_tag_duplicate(tag_keys, ref_key) + self._validate_tag_key(tag['Key']) + if len(tag['Value']) > 256: + raise TagValueTooBig(tag['Value']) + + tag_keys[ref_key] = tag + + return tag_keys + def _validate_tag_key(self, tag_key, exception_param='tags.X.member.key'): """Validates the tag key. @@ -740,23 +760,9 @@ class IAMBackend(BaseBackend): return tags, marker def tag_role(self, role_name, tags): - if len(tags) > 50: - raise TooManyTags(tags) - + clean_tags = self._tag_verification(tags) role = self.get_role(role_name) - - tag_keys = {} - for tag in tags: - # Need to index by the lowercase tag key since the keys are case insensitive, but their case is retained. 
- ref_key = tag['Key'].lower() - self._check_tag_duplicate(tag_keys, ref_key) - self._validate_tag_key(tag['Key']) - if len(tag['Value']) > 256: - raise TagValueTooBig(tag['Value']) - - tag_keys[ref_key] = tag - - role.tags.update(tag_keys) + role.tags.update(clean_tags) def untag_role(self, role_name, tag_keys): if len(tag_keys) > 50: diff --git a/moto/iam/responses.py b/moto/iam/responses.py index 7ec6242f6..806dd37f4 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -178,9 +178,11 @@ class IamResponse(BaseResponse): 'AssumeRolePolicyDocument') permissions_boundary = self._get_param( 'PermissionsBoundary') + description = self._get_param('Description') + tags = self._get_multi_param('Tags.member') role = iam_backend.create_role( - role_name, assume_role_policy_document, path, permissions_boundary) + role_name, assume_role_policy_document, path, permissions_boundary, description, tags) template = self.response_template(CREATE_ROLE_TEMPLATE) return template.render(role=role) @@ -1002,6 +1004,7 @@ CREATE_ROLE_TEMPLATE = """{{ role.arn }} {{ role.name }} {{ role.assume_role_policy_document }} + {{role.description}} {{ role.created_iso_8601 }} {{ role.id }} + {% if role.permissions_boundary %} + + PermissionsBoundaryPolicy + {{ role.permissions_boundary }} + + {% endif %} {% endfor %} diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index e7507e2e5..fe2117a3a 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -944,7 +944,8 @@ def test_get_account_authorization_details(): }) conn = boto3.client('iam', region_name='us-east-1') - conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") + boundary = 'arn:aws:iam::123456789012:policy/boundary' + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/", Description='testing', PermissionsBoundary=boundary) conn.create_user(Path='/', UserName='testUser') conn.create_group(Path='/', GroupName='testGroup') conn.create_policy( @@ -985,6 +986,11 @@ def test_get_account_authorization_details(): assert len(result['GroupDetailList']) == 0 assert len(result['Policies']) == 0 assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1 + assert result['RoleDetailList'][0]['InstanceProfileList'][0]['Roles'][0]['Description'] == 'testing' + assert result['RoleDetailList'][0]['InstanceProfileList'][0]['Roles'][0]['PermissionsBoundary'] == { + 'PermissionsBoundaryType': 'PermissionsBoundaryPolicy', + 'PermissionsBoundaryArn': 'arn:aws:iam::123456789012:policy/boundary' + } assert len(result['RoleDetailList'][0]['Tags']) == 2 assert len(result['RoleDetailList'][0]['RolePolicyList']) == 1 assert len(result['RoleDetailList'][0]['AttachedManagedPolicies']) == 1 @@ -1151,6 +1157,79 @@ def test_delete_saml_provider(): assert not resp['Certificates'] +@mock_iam() +def test_create_role_with_tags(): + """Tests both the tag_role and get_role_tags capability""" + conn = boto3.client('iam', region_name='us-east-1') + conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}", Tags=[ + { + 'Key': 'somekey', + 'Value': 'somevalue' + }, + { + 'Key': 'someotherkey', + 'Value': 'someothervalue' + } + ], Description='testing') + + # Get role: + role = conn.get_role(RoleName='my-role')['Role'] + assert len(role['Tags']) == 2 + assert role['Tags'][0]['Key'] == 'somekey' + assert role['Tags'][0]['Value'] == 'somevalue' + assert role['Tags'][1]['Key'] == 'someotherkey' + assert role['Tags'][1]['Value'] == 'someothervalue' + assert 
role['Description'] == 'testing' + + # Empty is good: + conn.create_role(RoleName="my-role2", AssumeRolePolicyDocument="{}", Tags=[ + { + 'Key': 'somekey', + 'Value': '' + } + ]) + tags = conn.list_role_tags(RoleName='my-role2') + assert len(tags['Tags']) == 1 + assert tags['Tags'][0]['Key'] == 'somekey' + assert tags['Tags'][0]['Value'] == '' + + # Test creating tags with invalid values: + # With more than 50 tags: + with assert_raises(ClientError) as ce: + too_many_tags = list(map(lambda x: {'Key': str(x), 'Value': str(x)}, range(0, 51))) + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=too_many_tags) + assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \ + in ce.exception.response['Error']['Message'] + + # With a duplicate tag: + with assert_raises(ClientError) as ce: + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=[{'Key': '0', 'Value': ''}, {'Key': '0', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \ + in ce.exception.response['Error']['Message'] + + # Duplicate tag with different casing: + with assert_raises(ClientError) as ce: + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=[{'Key': 'a', 'Value': ''}, {'Key': 'A', 'Value': ''}]) + assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \ + in ce.exception.response['Error']['Message'] + + # With a really big key: + with assert_raises(ClientError) as ce: + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=[{'Key': '0' * 129, 'Value': ''}]) + assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message'] + + # With a really big value: + with assert_raises(ClientError) as ce: + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=[{'Key': '0', 'Value': '0' * 257}]) + assert 'Member must have length less than or equal to 256.' 
in ce.exception.response['Error']['Message'] + + # With an invalid character: + with assert_raises(ClientError) as ce: + conn.create_role(RoleName="my-role3", AssumeRolePolicyDocument="{}", Tags=[{'Key': 'NOWAY!', 'Value': ''}]) + assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \ + in ce.exception.response['Error']['Message'] + + @mock_iam() def test_tag_role(): """Tests both the tag_role and get_role_tags capability""" @@ -1338,6 +1417,7 @@ def test_update_role_description(): assert response['Role']['RoleName'] == 'my-role' + @mock_iam() def test_update_role(): conn = boto3.client('iam', region_name='us-east-1') @@ -1349,6 +1429,7 @@ def test_update_role(): response = conn.update_role_description(RoleName="my-role", Description="test") assert response['Role']['RoleName'] == 'my-role' + @mock_iam() def test_update_role(): conn = boto3.client('iam', region_name='us-east-1') @@ -1443,6 +1524,8 @@ def test_create_role_no_path(): resp = conn.create_role(RoleName='my-role', AssumeRolePolicyDocument='some policy', Description='test') resp.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-role') resp.get('Role').should_not.have.key('PermissionsBoundary') + resp.get('Role').get('Description').should.equal('test') + @mock_iam() def test_create_role_with_permissions_boundary(): @@ -1454,6 +1537,7 @@ def test_create_role_with_permissions_boundary(): 'PermissionsBoundaryArn': boundary } resp.get('Role').get('PermissionsBoundary').should.equal(expected) + resp.get('Role').get('Description').should.equal('test') invalid_boundary_arn = 'arn:aws:iam::123456789:not_a_boundary' with assert_raises(ClientError): From 210b8b24eb7b25e73a8b1a8f32fc07f7a6faf8c0 Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Wed, 21 Aug 2019 17:45:37 -0400 Subject: [PATCH 36/42] Map service `BatchSize` defaults/maxes --- moto/awslambda/models.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index 0fcabbf03..08464034c 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -435,9 +435,28 @@ class EventSourceMapping(BaseModel): self.event_source_arn = spec['EventSourceArn'] self.uuid = str(uuid.uuid4()) self.last_modified = time.mktime(datetime.datetime.utcnow().timetuple()) + self.batch_size = '' # Default to blank + + # BatchSize service default/max mapping + batch_size_map = { + 'kinesis': (100, 10000), + 'dynamodb': (100, 1000), + 'sqs': (10, 10), + } + source_type = self.event_source_arn.split(":")[2].lower() + batch_size_entry = batch_size_map.get(source_type) + if batch_size_entry: + # Use service default if not provided + batch_size = int(spec.get('BatchSize', batch_size_entry[0])) + if batch_size > batch_size_entry[1]: + raise ValueError( + "InvalidParameterValueException", + "BatchSize {} exceeds the max of {}".format(batch_size, batch_size_entry[1])) + else: + self.batch_size = batch_size + # optional self.starting_position = spec.get('StartingPosition', 'TRIM_HORIZON') - self.batch_size = spec.get('BatchSize', 10) # TODO: Add source type-specific defaults self.enabled = spec.get('Enabled', True) self.starting_position_timestamp = spec.get('StartingPositionTimestamp', None) From 819dcfee247e1c1232647343dc8c8181474a187b Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Wed, 21 Aug 2019 18:00:46 -0400 Subject: [PATCH 37/42] Fix indent --- moto/awslambda/models.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/moto/awslambda/models.py b/moto/awslambda/models.py index 08464034c..b31e067c3 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -449,9 +449,8 @@ class EventSourceMapping(BaseModel): # Use service default if not provided batch_size = int(spec.get('BatchSize', batch_size_entry[0])) if batch_size > batch_size_entry[1]: - raise ValueError( - "InvalidParameterValueException", - "BatchSize {} exceeds the max of {}".format(batch_size, batch_size_entry[1])) + raise ValueError("InvalidParameterValueException", + "BatchSize {} exceeds the max of {}".format(batch_size, batch_size_entry[1])) else: self.batch_size = batch_size From e568dadadc4a99ffe1d16b7c9ea070c37e00482b Mon Sep 17 00:00:00 2001 From: Steve Pulec Date: Wed, 21 Aug 2019 21:19:29 -0500 Subject: [PATCH 38/42] Move implementation percentage to separate line. Closes #2368. --- IMPLEMENTATION_COVERAGE.md | 2611 +++++++++++++++++++++++++--- scripts/implementation_coverage.py | 6 +- 2 files changed, 2389 insertions(+), 228 deletions(-) diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md index 897c3885c..d149b0dd8 100644 --- a/IMPLEMENTATION_COVERAGE.md +++ b/IMPLEMENTATION_COVERAGE.md @@ -1,5 +1,6 @@ -## acm - 41% implemented +## acm +38% implemented - [X] add_tags_to_certificate - [X] delete_certificate - [ ] describe_certificate @@ -9,14 +10,18 @@ - [ ] list_certificates - [ ] list_tags_for_certificate - [X] remove_tags_from_certificate +- [ ] renew_certificate - [X] request_certificate - [ ] resend_validation_email - [ ] update_certificate_options -## acm-pca - 0% implemented +## acm-pca +0% implemented - [ ] create_certificate_authority - [ ] create_certificate_authority_audit_report +- [ ] create_permission - [ ] delete_certificate_authority +- [ ] delete_permission - [ ] describe_certificate_authority - [ ] describe_certificate_authority_audit_report - [ ] get_certificate @@ -25,63 +30,145 @@ - [ ] import_certificate_authority_certificate - [ ] issue_certificate - [ ] list_certificate_authorities +- [ ] list_permissions - [ ] list_tags +- [ ] restore_certificate_authority - [ ] revoke_certificate - [ ] tag_certificate_authority - [ ] untag_certificate_authority - [ ] update_certificate_authority -## alexaforbusiness - 0% implemented +## alexaforbusiness +0% implemented +- [ ] approve_skill - [ ] associate_contact_with_address_book +- [ ] associate_device_with_network_profile - [ ] associate_device_with_room - [ ] associate_skill_group_with_room +- [ ] associate_skill_with_skill_group +- [ ] associate_skill_with_users - [ ] create_address_book +- [ ] create_business_report_schedule +- [ ] create_conference_provider - [ ] create_contact +- [ ] create_gateway_group +- [ ] create_network_profile - [ ] create_profile - [ ] create_room - [ ] create_skill_group - [ ] create_user - [ ] delete_address_book +- [ ] delete_business_report_schedule +- [ ] delete_conference_provider - [ ] delete_contact +- [ ] delete_device +- [ ] delete_device_usage_data +- [ ] delete_gateway_group +- [ ] delete_network_profile - [ ] delete_profile - [ ] delete_room - [ ] delete_room_skill_parameter +- [ ] delete_skill_authorization - [ ] delete_skill_group - [ ] delete_user - [ ] disassociate_contact_from_address_book - [ ] disassociate_device_from_room +- [ ] disassociate_skill_from_skill_group +- [ ] disassociate_skill_from_users - [ ] disassociate_skill_group_from_room +- [ ] forget_smart_home_appliances - [ ] get_address_book +- [ ] get_conference_preference +- [ ] get_conference_provider - [ ] get_contact - [ ] 
get_device +- [ ] get_gateway +- [ ] get_gateway_group +- [ ] get_invitation_configuration +- [ ] get_network_profile - [ ] get_profile - [ ] get_room - [ ] get_room_skill_parameter - [ ] get_skill_group +- [ ] list_business_report_schedules +- [ ] list_conference_providers +- [ ] list_device_events +- [ ] list_gateway_groups +- [ ] list_gateways - [ ] list_skills +- [ ] list_skills_store_categories +- [ ] list_skills_store_skills_by_category +- [ ] list_smart_home_appliances - [ ] list_tags +- [ ] put_conference_preference +- [ ] put_invitation_configuration - [ ] put_room_skill_parameter +- [ ] put_skill_authorization +- [ ] register_avs_device +- [ ] reject_skill - [ ] resolve_room - [ ] revoke_invitation - [ ] search_address_books - [ ] search_contacts - [ ] search_devices +- [ ] search_network_profiles - [ ] search_profiles - [ ] search_rooms - [ ] search_skill_groups - [ ] search_users +- [ ] send_announcement - [ ] send_invitation - [ ] start_device_sync +- [ ] start_smart_home_appliance_discovery - [ ] tag_resource - [ ] untag_resource - [ ] update_address_book +- [ ] update_business_report_schedule +- [ ] update_conference_provider - [ ] update_contact - [ ] update_device +- [ ] update_gateway +- [ ] update_gateway_group +- [ ] update_network_profile - [ ] update_profile - [ ] update_room - [ ] update_skill_group -## apigateway - 24% implemented +## amplify +0% implemented +- [ ] create_app +- [ ] create_branch +- [ ] create_deployment +- [ ] create_domain_association +- [ ] create_webhook +- [ ] delete_app +- [ ] delete_branch +- [ ] delete_domain_association +- [ ] delete_job +- [ ] delete_webhook +- [ ] get_app +- [ ] get_branch +- [ ] get_domain_association +- [ ] get_job +- [ ] get_webhook +- [ ] list_apps +- [ ] list_branches +- [ ] list_domain_associations +- [ ] list_jobs +- [ ] list_tags_for_resource +- [ ] list_webhooks +- [ ] start_deployment +- [ ] start_job +- [ ] stop_job +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_app +- [ ] update_branch +- [ ] update_domain_association +- [ ] update_webhook + +## apigateway +24% implemented - [ ] create_api_key - [ ] create_authorizer - [ ] create_base_path_mapping @@ -181,7 +268,7 @@ - [ ] test_invoke_method - [ ] untag_resource - [ ] update_account -- [X] update_api_key +- [ ] update_api_key - [ ] update_authorizer - [ ] update_base_path_mapping - [ ] update_client_certificate @@ -203,7 +290,74 @@ - [ ] update_usage_plan - [ ] update_vpc_link -## application-autoscaling - 0% implemented +## apigatewaymanagementapi +0% implemented +- [ ] post_to_connection + +## apigatewayv2 +0% implemented +- [ ] create_api +- [ ] create_api_mapping +- [ ] create_authorizer +- [ ] create_deployment +- [ ] create_domain_name +- [ ] create_integration +- [ ] create_integration_response +- [ ] create_model +- [ ] create_route +- [ ] create_route_response +- [ ] create_stage +- [ ] delete_api +- [ ] delete_api_mapping +- [ ] delete_authorizer +- [ ] delete_deployment +- [ ] delete_domain_name +- [ ] delete_integration +- [ ] delete_integration_response +- [ ] delete_model +- [ ] delete_route +- [ ] delete_route_response +- [ ] delete_stage +- [ ] get_api +- [ ] get_api_mapping +- [ ] get_api_mappings +- [ ] get_apis +- [ ] get_authorizer +- [ ] get_authorizers +- [ ] get_deployment +- [ ] get_deployments +- [ ] get_domain_name +- [ ] get_domain_names +- [ ] get_integration +- [ ] get_integration_response +- [ ] get_integration_responses +- [ ] get_integrations +- [ ] get_model +- [ ] get_model_template +- [ ] get_models +- [ ] get_route 
+- [ ] get_route_response +- [ ] get_route_responses +- [ ] get_routes +- [ ] get_stage +- [ ] get_stages +- [ ] get_tags +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_api +- [ ] update_api_mapping +- [ ] update_authorizer +- [ ] update_deployment +- [ ] update_domain_name +- [ ] update_integration +- [ ] update_integration_response +- [ ] update_model +- [ ] update_route +- [ ] update_route_response +- [ ] update_stage + +## application-autoscaling +0% implemented - [ ] delete_scaling_policy - [ ] delete_scheduled_action - [ ] deregister_scalable_target @@ -215,8 +369,61 @@ - [ ] put_scheduled_action - [ ] register_scalable_target -## appstream - 0% implemented +## application-insights +0% implemented +- [ ] create_application +- [ ] create_component +- [ ] delete_application +- [ ] delete_component +- [ ] describe_application +- [ ] describe_component +- [ ] describe_component_configuration +- [ ] describe_component_configuration_recommendation +- [ ] describe_observation +- [ ] describe_problem +- [ ] describe_problem_observations +- [ ] list_applications +- [ ] list_components +- [ ] list_problems +- [ ] update_component +- [ ] update_component_configuration + +## appmesh +0% implemented +- [ ] create_mesh +- [ ] create_route +- [ ] create_virtual_node +- [ ] create_virtual_router +- [ ] create_virtual_service +- [ ] delete_mesh +- [ ] delete_route +- [ ] delete_virtual_node +- [ ] delete_virtual_router +- [ ] delete_virtual_service +- [ ] describe_mesh +- [ ] describe_route +- [ ] describe_virtual_node +- [ ] describe_virtual_router +- [ ] describe_virtual_service +- [ ] list_meshes +- [ ] list_routes +- [ ] list_tags_for_resource +- [ ] list_virtual_nodes +- [ ] list_virtual_routers +- [ ] list_virtual_services +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_mesh +- [ ] update_route +- [ ] update_virtual_node +- [ ] update_virtual_router +- [ ] update_virtual_service + +## appstream +0% implemented - [ ] associate_fleet +- [ ] batch_associate_user_stack +- [ ] batch_disassociate_user_stack - [ ] copy_image - [ ] create_directory_config - [ ] create_fleet @@ -224,18 +431,29 @@ - [ ] create_image_builder_streaming_url - [ ] create_stack - [ ] create_streaming_url +- [ ] create_usage_report_subscription +- [ ] create_user - [ ] delete_directory_config - [ ] delete_fleet - [ ] delete_image - [ ] delete_image_builder +- [ ] delete_image_permissions - [ ] delete_stack +- [ ] delete_usage_report_subscription +- [ ] delete_user - [ ] describe_directory_configs - [ ] describe_fleets - [ ] describe_image_builders +- [ ] describe_image_permissions - [ ] describe_images - [ ] describe_sessions - [ ] describe_stacks +- [ ] describe_usage_report_subscriptions +- [ ] describe_user_stack_associations +- [ ] describe_users +- [ ] disable_user - [ ] disassociate_fleet +- [ ] enable_user - [ ] expire_session - [ ] list_associated_fleets - [ ] list_associated_stacks @@ -248,20 +466,25 @@ - [ ] untag_resource - [ ] update_directory_config - [ ] update_fleet +- [ ] update_image_permissions - [ ] update_stack -## appsync - 0% implemented +## appsync +0% implemented - [ ] create_api_key - [ ] create_data_source +- [ ] create_function - [ ] create_graphql_api - [ ] create_resolver - [ ] create_type - [ ] delete_api_key - [ ] delete_data_source +- [ ] delete_function - [ ] delete_graphql_api - [ ] delete_resolver - [ ] delete_type - [ ] get_data_source +- [ ] get_function - [ ] get_graphql_api - [ ] get_introspection_schema - [ ] get_resolver @@ -269,33 +492,51 @@ - [ ] get_type - [ ] 
list_api_keys - [ ] list_data_sources +- [ ] list_functions - [ ] list_graphql_apis - [ ] list_resolvers +- [ ] list_resolvers_by_function +- [ ] list_tags_for_resource - [ ] list_types - [ ] start_schema_creation +- [ ] tag_resource +- [ ] untag_resource - [ ] update_api_key - [ ] update_data_source +- [ ] update_function - [ ] update_graphql_api - [ ] update_resolver - [ ] update_type -## athena - 0% implemented +## athena +0% implemented - [ ] batch_get_named_query - [ ] batch_get_query_execution - [ ] create_named_query +- [ ] create_work_group - [ ] delete_named_query +- [ ] delete_work_group - [ ] get_named_query - [ ] get_query_execution - [ ] get_query_results +- [ ] get_work_group - [ ] list_named_queries - [ ] list_query_executions +- [ ] list_tags_for_resource +- [ ] list_work_groups - [ ] start_query_execution - [ ] stop_query_execution +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_work_group -## autoscaling - 44% implemented +## autoscaling +44% implemented - [X] attach_instances - [X] attach_load_balancer_target_groups - [X] attach_load_balancers +- [ ] batch_delete_scheduled_action +- [ ] batch_put_scheduled_update_group_action - [ ] complete_lifecycle_action - [X] create_auto_scaling_group - [X] create_launch_configuration @@ -341,18 +582,68 @@ - [ ] resume_processes - [X] set_desired_capacity - [X] set_instance_health -- [ ] set_instance_protection +- [X] set_instance_protection - [X] suspend_processes - [ ] terminate_instance_in_auto_scaling_group - [X] update_auto_scaling_group -## autoscaling-plans - 0% implemented +## autoscaling-plans +0% implemented - [ ] create_scaling_plan - [ ] delete_scaling_plan - [ ] describe_scaling_plan_resources - [ ] describe_scaling_plans +- [ ] get_scaling_plan_resource_forecast_data +- [ ] update_scaling_plan -## batch - 93% implemented +## backup +0% implemented +- [ ] create_backup_plan +- [ ] create_backup_selection +- [ ] create_backup_vault +- [ ] delete_backup_plan +- [ ] delete_backup_selection +- [ ] delete_backup_vault +- [ ] delete_backup_vault_access_policy +- [ ] delete_backup_vault_notifications +- [ ] delete_recovery_point +- [ ] describe_backup_job +- [ ] describe_backup_vault +- [ ] describe_protected_resource +- [ ] describe_recovery_point +- [ ] describe_restore_job +- [ ] export_backup_plan_template +- [ ] get_backup_plan +- [ ] get_backup_plan_from_json +- [ ] get_backup_plan_from_template +- [ ] get_backup_selection +- [ ] get_backup_vault_access_policy +- [ ] get_backup_vault_notifications +- [ ] get_recovery_point_restore_metadata +- [ ] get_supported_resource_types +- [ ] list_backup_jobs +- [ ] list_backup_plan_templates +- [ ] list_backup_plan_versions +- [ ] list_backup_plans +- [ ] list_backup_selections +- [ ] list_backup_vaults +- [ ] list_protected_resources +- [ ] list_recovery_points_by_backup_vault +- [ ] list_recovery_points_by_resource +- [ ] list_restore_jobs +- [ ] list_tags +- [ ] put_backup_vault_access_policy +- [ ] put_backup_vault_notifications +- [ ] start_backup_job +- [ ] start_restore_job +- [ ] stop_backup_job +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_backup_plan +- [ ] update_recovery_point_lifecycle + +## batch +93% implemented - [ ] cancel_job - [X] create_compute_environment - [X] create_job_queue @@ -370,7 +661,8 @@ - [X] update_compute_environment - [X] update_job_queue -## budgets - 0% implemented +## budgets +0% implemented - [ ] create_budget - [ ] create_notification - [ ] create_subscriber @@ -378,6 +670,7 @@ - [ ] delete_notification - [ ] 
delete_subscriber - [ ] describe_budget +- [ ] describe_budget_performance_history - [ ] describe_budgets - [ ] describe_notifications_for_budget - [ ] describe_subscribers_for_notification @@ -385,15 +678,81 @@ - [ ] update_notification - [ ] update_subscriber -## ce - 0% implemented +## ce +0% implemented - [ ] get_cost_and_usage +- [ ] get_cost_forecast - [ ] get_dimension_values - [ ] get_reservation_coverage - [ ] get_reservation_purchase_recommendation - [ ] get_reservation_utilization +- [ ] get_rightsizing_recommendation - [ ] get_tags +- [ ] get_usage_forecast -## cloud9 - 0% implemented +## chime +0% implemented +- [ ] associate_phone_number_with_user +- [ ] associate_phone_numbers_with_voice_connector +- [ ] batch_delete_phone_number +- [ ] batch_suspend_user +- [ ] batch_unsuspend_user +- [ ] batch_update_phone_number +- [ ] batch_update_user +- [ ] create_account +- [ ] create_bot +- [ ] create_phone_number_order +- [ ] create_voice_connector +- [ ] delete_account +- [ ] delete_events_configuration +- [ ] delete_phone_number +- [ ] delete_voice_connector +- [ ] delete_voice_connector_origination +- [ ] delete_voice_connector_termination +- [ ] delete_voice_connector_termination_credentials +- [ ] disassociate_phone_number_from_user +- [ ] disassociate_phone_numbers_from_voice_connector +- [ ] get_account +- [ ] get_account_settings +- [ ] get_bot +- [ ] get_events_configuration +- [ ] get_global_settings +- [ ] get_phone_number +- [ ] get_phone_number_order +- [ ] get_user +- [ ] get_user_settings +- [ ] get_voice_connector +- [ ] get_voice_connector_origination +- [ ] get_voice_connector_termination +- [ ] get_voice_connector_termination_health +- [ ] invite_users +- [ ] list_accounts +- [ ] list_bots +- [ ] list_phone_number_orders +- [ ] list_phone_numbers +- [ ] list_users +- [ ] list_voice_connector_termination_credentials +- [ ] list_voice_connectors +- [ ] logout_user +- [ ] put_events_configuration +- [ ] put_voice_connector_origination +- [ ] put_voice_connector_termination +- [ ] put_voice_connector_termination_credentials +- [ ] regenerate_security_token +- [ ] reset_personal_pin +- [ ] restore_phone_number +- [ ] search_available_phone_numbers +- [ ] update_account +- [ ] update_account_settings +- [ ] update_bot +- [ ] update_global_settings +- [ ] update_phone_number +- [ ] update_user +- [ ] update_user_settings +- [ ] update_voice_connector + +## cloud9 +0% implemented - [ ] create_environment_ec2 - [ ] create_environment_membership - [ ] delete_environment @@ -405,7 +764,8 @@ - [ ] update_environment - [ ] update_environment_membership -## clouddirectory - 0% implemented +## clouddirectory +0% implemented - [ ] add_facet_to_object - [ ] apply_schema - [ ] attach_object @@ -434,6 +794,7 @@ - [ ] get_applied_schema_version - [ ] get_directory - [ ] get_facet +- [ ] get_link_attributes - [ ] get_object_attributes - [ ] get_object_information - [ ] get_schema_as_json @@ -446,6 +807,7 @@ - [ ] list_facet_names - [ ] list_incoming_typed_links - [ ] list_index +- [ ] list_managed_schema_arns - [ ] list_object_attributes - [ ] list_object_children - [ ] list_object_parent_paths @@ -464,13 +826,15 @@ - [ ] tag_resource - [ ] untag_resource - [ ] update_facet +- [ ] update_link_attributes - [ ] update_object_attributes - [ ] update_schema - [ ] update_typed_link_facet - [ ] upgrade_applied_schema - [ ] upgrade_published_schema -## cloudformation - 65% implemented +## cloudformation +40% implemented - [ ] cancel_update_stack - [ ] continue_update_rollback - [X] 
create_change_set @@ -481,46 +845,44 @@ - [X] delete_stack - [X] delete_stack_instances - [X] delete_stack_set -- [ ] deploy - [ ] describe_account_limits - [X] describe_change_set - [ ] describe_stack_drift_detection_status -- [X] describe_stack_events -- [X] describe_stack_instance -- [X] describe_stack_resource +- [ ] describe_stack_events +- [ ] describe_stack_instance +- [ ] describe_stack_resource - [ ] describe_stack_resource_drifts -- [X] describe_stack_resources -- [X] describe_stack_set -- [X] describe_stack_set_operation +- [ ] describe_stack_resources +- [ ] describe_stack_set +- [ ] describe_stack_set_operation - [X] describe_stacks - [ ] detect_stack_drift - [ ] detect_stack_resource_drift - [ ] estimate_template_cost - [X] execute_change_set - [ ] get_stack_policy -- [X] get_template +- [ ] get_template - [ ] get_template_summary - [X] list_change_sets - [X] list_exports - [ ] list_imports -- [X] list_stack_instances +- [ ] list_stack_instances - [X] list_stack_resources -- [X] list_stack_set_operation_results -- [X] list_stack_set_operations -- [X] list_stack_sets +- [ ] list_stack_set_operation_results +- [ ] list_stack_set_operations +- [ ] list_stack_sets - [X] list_stacks -- [ ] package - [ ] set_stack_policy - [ ] signal_resource -- [X] stop_stack_set_operation +- [ ] stop_stack_set_operation - [X] update_stack -- [X] update_stack_instances +- [ ] update_stack_instances - [X] update_stack_set - [ ] update_termination_protection - [X] validate_template -- [ ] wait -## cloudfront - 0% implemented +## cloudfront +0% implemented - [ ] create_cloud_front_origin_access_identity - [ ] create_distribution - [ ] create_distribution_with_tags @@ -535,7 +897,6 @@ - [ ] delete_field_level_encryption_config - [ ] delete_field_level_encryption_profile - [ ] delete_public_key -- [ ] delete_service_linked_role - [ ] delete_streaming_distribution - [ ] get_cloud_front_origin_access_identity - [ ] get_cloud_front_origin_access_identity_config @@ -568,7 +929,8 @@ - [ ] update_public_key - [ ] update_streaming_distribution -## cloudhsm - 0% implemented +## cloudhsm +0% implemented - [ ] add_tags_to_resource - [ ] create_hapg - [ ] create_hsm @@ -590,19 +952,24 @@ - [ ] modify_luna_client - [ ] remove_tags_from_resource -## cloudhsmv2 - 0% implemented +## cloudhsmv2 +0% implemented +- [ ] copy_backup_to_region - [ ] create_cluster - [ ] create_hsm +- [ ] delete_backup - [ ] delete_cluster - [ ] delete_hsm - [ ] describe_backups - [ ] describe_clusters - [ ] initialize_cluster - [ ] list_tags +- [ ] restore_backup - [ ] tag_resource - [ ] untag_resource -## cloudsearch - 0% implemented +## cloudsearch +0% implemented - [ ] build_suggesters - [ ] create_domain - [ ] define_analysis_scheme @@ -628,12 +995,14 @@ - [ ] update_scaling_parameters - [ ] update_service_access_policies -## cloudsearchdomain - 0% implemented +## cloudsearchdomain +0% implemented - [ ] search - [ ] suggest - [ ] upload_documents -## cloudtrail - 0% implemented +## cloudtrail +0% implemented - [ ] add_tags - [ ] create_trail - [ ] delete_trail @@ -649,50 +1018,68 @@ - [ ] stop_logging - [ ] update_trail -## cloudwatch - 56% implemented +## cloudwatch +39% implemented - [X] delete_alarms +- [ ] delete_anomaly_detector - [X] delete_dashboards - [ ] describe_alarm_history - [ ] describe_alarms - [ ] describe_alarms_for_metric +- [ ] describe_anomaly_detectors - [ ] disable_alarm_actions - [ ] enable_alarm_actions - [X] get_dashboard - [ ] get_metric_data - [X] get_metric_statistics +- [ ] get_metric_widget_image - [X] 
list_dashboards - [ ] list_metrics +- [ ] list_tags_for_resource +- [ ] put_anomaly_detector - [X] put_dashboard - [X] put_metric_alarm - [X] put_metric_data - [X] set_alarm_state +- [ ] tag_resource +- [ ] untag_resource -## codebuild - 0% implemented +## codebuild +0% implemented - [ ] batch_delete_builds - [ ] batch_get_builds - [ ] batch_get_projects - [ ] create_project - [ ] create_webhook - [ ] delete_project +- [ ] delete_source_credentials - [ ] delete_webhook +- [ ] import_source_credentials - [ ] invalidate_project_cache - [ ] list_builds - [ ] list_builds_for_project - [ ] list_curated_environment_images - [ ] list_projects +- [ ] list_source_credentials - [ ] start_build - [ ] stop_build - [ ] update_project - [ ] update_webhook -## codecommit - 0% implemented +## codecommit +0% implemented +- [ ] batch_describe_merge_conflicts - [ ] batch_get_repositories - [ ] create_branch +- [ ] create_commit - [ ] create_pull_request - [ ] create_repository +- [ ] create_unreferenced_merge_commit - [ ] delete_branch - [ ] delete_comment_content +- [ ] delete_file - [ ] delete_repository +- [ ] describe_merge_conflicts - [ ] describe_pull_request_events - [ ] get_blob - [ ] get_branch @@ -701,20 +1088,32 @@ - [ ] get_comments_for_pull_request - [ ] get_commit - [ ] get_differences +- [ ] get_file +- [ ] get_folder +- [ ] get_merge_commit - [ ] get_merge_conflicts +- [ ] get_merge_options - [ ] get_pull_request - [ ] get_repository - [ ] get_repository_triggers - [ ] list_branches - [ ] list_pull_requests - [ ] list_repositories +- [ ] list_tags_for_resource +- [ ] merge_branches_by_fast_forward +- [ ] merge_branches_by_squash +- [ ] merge_branches_by_three_way - [ ] merge_pull_request_by_fast_forward +- [ ] merge_pull_request_by_squash +- [ ] merge_pull_request_by_three_way - [ ] post_comment_for_compared_commit - [ ] post_comment_for_pull_request - [ ] post_comment_reply - [ ] put_file - [ ] put_repository_triggers +- [ ] tag_resource - [ ] test_repository_triggers +- [ ] untag_resource - [ ] update_comment - [ ] update_default_branch - [ ] update_pull_request_description @@ -723,12 +1122,14 @@ - [ ] update_repository_description - [ ] update_repository_name -## codedeploy - 0% implemented +## codedeploy +0% implemented - [ ] add_tags_to_on_premises_instances - [ ] batch_get_application_revisions - [ ] batch_get_applications - [ ] batch_get_deployment_groups - [ ] batch_get_deployment_instances +- [ ] batch_get_deployment_targets - [ ] batch_get_deployments - [ ] batch_get_on_premises_instances - [ ] continue_deployment @@ -747,31 +1148,39 @@ - [ ] get_deployment_config - [ ] get_deployment_group - [ ] get_deployment_instance +- [ ] get_deployment_target - [ ] get_on_premises_instance - [ ] list_application_revisions - [ ] list_applications - [ ] list_deployment_configs - [ ] list_deployment_groups - [ ] list_deployment_instances +- [ ] list_deployment_targets - [ ] list_deployments - [ ] list_git_hub_account_token_names - [ ] list_on_premises_instances +- [ ] list_tags_for_resource - [ ] put_lifecycle_event_hook_execution_status - [ ] register_application_revision - [ ] register_on_premises_instance - [ ] remove_tags_from_on_premises_instances - [ ] skip_wait_time_for_instance_termination - [ ] stop_deployment +- [ ] tag_resource +- [ ] untag_resource - [ ] update_application - [ ] update_deployment_group -## codepipeline - 0% implemented +## codepipeline +0% implemented - [ ] acknowledge_job - [ ] acknowledge_third_party_job - [ ] create_custom_action_type - [ ] create_pipeline - [ ] 
delete_custom_action_type - [ ] delete_pipeline +- [ ] delete_webhook +- [ ] deregister_webhook_with_third_party - [ ] disable_stage_transition - [ ] enable_stage_transition - [ ] get_job_details @@ -779,9 +1188,12 @@ - [ ] get_pipeline_execution - [ ] get_pipeline_state - [ ] get_third_party_job_details +- [ ] list_action_executions - [ ] list_action_types - [ ] list_pipeline_executions - [ ] list_pipelines +- [ ] list_tags_for_resource +- [ ] list_webhooks - [ ] poll_for_jobs - [ ] poll_for_third_party_jobs - [ ] put_action_revision @@ -790,11 +1202,16 @@ - [ ] put_job_success_result - [ ] put_third_party_job_failure_result - [ ] put_third_party_job_success_result +- [ ] put_webhook +- [ ] register_webhook_with_third_party - [ ] retry_stage_execution - [ ] start_pipeline_execution +- [ ] tag_resource +- [ ] untag_resource - [ ] update_pipeline -## codestar - 0% implemented +## codestar +0% implemented - [ ] associate_team_member - [ ] create_project - [ ] create_user_profile @@ -814,7 +1231,8 @@ - [ ] update_team_member - [ ] update_user_profile -## cognito-identity - 0% implemented +## cognito-identity +23% implemented - [X] create_identity_pool - [ ] delete_identities - [ ] delete_identity_pool @@ -827,14 +1245,18 @@ - [X] get_open_id_token_for_developer_identity - [ ] list_identities - [ ] list_identity_pools +- [ ] list_tags_for_resource - [ ] lookup_developer_identity - [ ] merge_developer_identities - [ ] set_identity_pool_roles +- [ ] tag_resource - [ ] unlink_developer_identity - [ ] unlink_identity +- [ ] untag_resource - [ ] update_identity_pool -## cognito-idp - 34% implemented +## cognito-idp +37% implemented - [ ] add_custom_attributes - [X] admin_add_user_to_group - [ ] admin_confirm_sign_up @@ -856,6 +1278,7 @@ - [ ] admin_reset_user_password - [ ] admin_respond_to_auth_challenge - [ ] admin_set_user_mfa_preference +- [ ] admin_set_user_password - [ ] admin_set_user_settings - [ ] admin_update_auth_event_feedback - [ ] admin_update_device_status @@ -905,6 +1328,7 @@ - [X] list_groups - [X] list_identity_providers - [ ] list_resource_servers +- [ ] list_tags_for_resource - [ ] list_user_import_jobs - [X] list_user_pool_clients - [X] list_user_pools @@ -920,10 +1344,12 @@ - [ ] sign_up - [ ] start_user_import_job - [ ] stop_user_import_job +- [ ] tag_resource +- [ ] untag_resource - [ ] update_auth_event_feedback - [ ] update_device_status - [ ] update_group -- [x] update_identity_provider +- [X] update_identity_provider - [ ] update_resource_server - [ ] update_user_attributes - [ ] update_user_pool @@ -932,7 +1358,8 @@ - [ ] verify_software_token - [ ] verify_user_attribute -## cognito-sync - 0% implemented +## cognito-sync +0% implemented - [ ] bulk_publish - [ ] delete_dataset - [ ] describe_dataset @@ -951,71 +1378,155 @@ - [ ] unsubscribe_from_dataset - [ ] update_records -## comprehend - 0% implemented +## comprehend +0% implemented - [ ] batch_detect_dominant_language - [ ] batch_detect_entities - [ ] batch_detect_key_phrases - [ ] batch_detect_sentiment +- [ ] batch_detect_syntax +- [ ] create_document_classifier +- [ ] create_entity_recognizer +- [ ] delete_document_classifier +- [ ] delete_entity_recognizer +- [ ] describe_document_classification_job +- [ ] describe_document_classifier +- [ ] describe_dominant_language_detection_job +- [ ] describe_entities_detection_job +- [ ] describe_entity_recognizer +- [ ] describe_key_phrases_detection_job +- [ ] describe_sentiment_detection_job - [ ] describe_topics_detection_job - [ ] detect_dominant_language - [ ] 
detect_entities - [ ] detect_key_phrases - [ ] detect_sentiment +- [ ] detect_syntax +- [ ] list_document_classification_jobs +- [ ] list_document_classifiers +- [ ] list_dominant_language_detection_jobs +- [ ] list_entities_detection_jobs +- [ ] list_entity_recognizers +- [ ] list_key_phrases_detection_jobs +- [ ] list_sentiment_detection_jobs +- [ ] list_tags_for_resource - [ ] list_topics_detection_jobs +- [ ] start_document_classification_job +- [ ] start_dominant_language_detection_job +- [ ] start_entities_detection_job +- [ ] start_key_phrases_detection_job +- [ ] start_sentiment_detection_job - [ ] start_topics_detection_job +- [ ] stop_dominant_language_detection_job +- [ ] stop_entities_detection_job +- [ ] stop_key_phrases_detection_job +- [ ] stop_sentiment_detection_job +- [ ] stop_training_document_classifier +- [ ] stop_training_entity_recognizer +- [ ] tag_resource +- [ ] untag_resource -## config - 0% implemented +## comprehendmedical +0% implemented +- [ ] detect_entities +- [ ] detect_phi + +## config +24% implemented +- [ ] batch_get_aggregate_resource_config - [ ] batch_get_resource_config -- [ ] delete_aggregation_authorization +- [X] delete_aggregation_authorization - [ ] delete_config_rule -- [ ] delete_configuration_aggregator -- [ ] delete_configuration_recorder -- [ ] delete_delivery_channel +- [X] delete_configuration_aggregator +- [X] delete_configuration_recorder +- [X] delete_delivery_channel - [ ] delete_evaluation_results +- [ ] delete_organization_config_rule - [ ] delete_pending_aggregation_request +- [ ] delete_remediation_configuration +- [ ] delete_retention_configuration - [ ] deliver_config_snapshot - [ ] describe_aggregate_compliance_by_config_rules -- [ ] describe_aggregation_authorizations +- [X] describe_aggregation_authorizations - [ ] describe_compliance_by_config_rule - [ ] describe_compliance_by_resource - [ ] describe_config_rule_evaluation_status - [ ] describe_config_rules - [ ] describe_configuration_aggregator_sources_status -- [ ] describe_configuration_aggregators -- [ ] describe_configuration_recorder_status -- [ ] describe_configuration_recorders +- [X] describe_configuration_aggregators +- [X] describe_configuration_recorder_status +- [X] describe_configuration_recorders - [ ] describe_delivery_channel_status -- [ ] describe_delivery_channels +- [X] describe_delivery_channels +- [ ] describe_organization_config_rule_statuses +- [ ] describe_organization_config_rules - [ ] describe_pending_aggregation_requests +- [ ] describe_remediation_configurations +- [ ] describe_remediation_execution_status +- [ ] describe_retention_configurations - [ ] get_aggregate_compliance_details_by_config_rule - [ ] get_aggregate_config_rule_compliance_summary +- [ ] get_aggregate_discovered_resource_counts +- [ ] get_aggregate_resource_config - [ ] get_compliance_details_by_config_rule - [ ] get_compliance_details_by_resource - [ ] get_compliance_summary_by_config_rule - [ ] get_compliance_summary_by_resource_type - [ ] get_discovered_resource_counts +- [ ] get_organization_config_rule_detailed_status - [ ] get_resource_config_history +- [ ] list_aggregate_discovered_resources - [ ] list_discovered_resources -- [ ] put_aggregation_authorization +- [ ] list_tags_for_resource +- [X] put_aggregation_authorization - [ ] put_config_rule -- [ ] put_configuration_aggregator -- [ ] put_configuration_recorder -- [ ] put_delivery_channel +- [X] put_configuration_aggregator +- [X] put_configuration_recorder +- [X] put_delivery_channel - [ ] put_evaluations +- [ ] 
put_organization_config_rule +- [ ] put_remediation_configurations +- [ ] put_retention_configuration +- [ ] select_resource_config - [ ] start_config_rules_evaluation -- [ ] start_configuration_recorder -- [ ] stop_configuration_recorder +- [X] start_configuration_recorder +- [ ] start_remediation_execution +- [X] stop_configuration_recorder +- [ ] tag_resource +- [ ] untag_resource -## connect - 0% implemented +## connect +0% implemented +- [ ] create_user +- [ ] delete_user +- [ ] describe_user +- [ ] describe_user_hierarchy_group +- [ ] describe_user_hierarchy_structure +- [ ] get_contact_attributes +- [ ] get_current_metric_data +- [ ] get_federation_token +- [ ] get_metric_data +- [ ] list_routing_profiles +- [ ] list_security_profiles +- [ ] list_user_hierarchy_groups +- [ ] list_users - [ ] start_outbound_voice_contact - [ ] stop_contact +- [ ] update_contact_attributes +- [ ] update_user_hierarchy +- [ ] update_user_identity_info +- [ ] update_user_phone_config +- [ ] update_user_routing_profile +- [ ] update_user_security_profiles -## cur - 0% implemented +## cur +0% implemented - [ ] delete_report_definition - [ ] describe_report_definitions - [ ] put_report_definition -## datapipeline - 42% implemented +## datapipeline +42% implemented - [X] activate_pipeline - [ ] add_tags - [X] create_pipeline @@ -1036,7 +1547,36 @@ - [ ] set_task_status - [ ] validate_pipeline_definition -## dax - 0% implemented +## datasync +0% implemented +- [ ] cancel_task_execution +- [ ] create_agent +- [ ] create_location_efs +- [ ] create_location_nfs +- [ ] create_location_s3 +- [ ] create_task +- [ ] delete_agent +- [ ] delete_location +- [ ] delete_task +- [ ] describe_agent +- [ ] describe_location_efs +- [ ] describe_location_nfs +- [ ] describe_location_s3 +- [ ] describe_task +- [ ] describe_task_execution +- [ ] list_agents +- [ ] list_locations +- [ ] list_tags_for_resource +- [ ] list_task_executions +- [ ] list_tasks +- [ ] start_task_execution +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_agent +- [ ] update_task + +## dax +0% implemented - [ ] create_cluster - [ ] create_parameter_group - [ ] create_subnet_group @@ -1059,13 +1599,15 @@ - [ ] update_parameter_group - [ ] update_subnet_group -## devicefarm - 0% implemented +## devicefarm +0% implemented - [ ] create_device_pool - [ ] create_instance_profile - [ ] create_network_profile - [ ] create_project - [ ] create_remote_access_session - [ ] create_upload +- [ ] create_vpce_configuration - [ ] delete_device_pool - [ ] delete_instance_profile - [ ] delete_network_profile @@ -1073,6 +1615,7 @@ - [ ] delete_remote_access_session - [ ] delete_run - [ ] delete_upload +- [ ] delete_vpce_configuration - [ ] get_account_settings - [ ] get_device - [ ] get_device_instance @@ -1088,6 +1631,7 @@ - [ ] get_suite - [ ] get_test - [ ] get_upload +- [ ] get_vpce_configuration - [ ] install_to_remote_access_session - [ ] list_artifacts - [ ] list_device_instances @@ -1104,49 +1648,64 @@ - [ ] list_runs - [ ] list_samples - [ ] list_suites +- [ ] list_tags_for_resource - [ ] list_tests - [ ] list_unique_problems - [ ] list_uploads +- [ ] list_vpce_configurations - [ ] purchase_offering - [ ] renew_offering - [ ] schedule_run +- [ ] stop_job - [ ] stop_remote_access_session - [ ] stop_run +- [ ] tag_resource +- [ ] untag_resource - [ ] update_device_instance - [ ] update_device_pool - [ ] update_instance_profile - [ ] update_network_profile - [ ] update_project +- [ ] update_upload +- [ ] update_vpce_configuration -## directconnect - 0% 
implemented +## directconnect +0% implemented +- [ ] accept_direct_connect_gateway_association_proposal - [ ] allocate_connection_on_interconnect - [ ] allocate_hosted_connection - [ ] allocate_private_virtual_interface - [ ] allocate_public_virtual_interface +- [ ] allocate_transit_virtual_interface - [ ] associate_connection_with_lag - [ ] associate_hosted_connection - [ ] associate_virtual_interface - [ ] confirm_connection - [ ] confirm_private_virtual_interface - [ ] confirm_public_virtual_interface +- [ ] confirm_transit_virtual_interface - [ ] create_bgp_peer - [ ] create_connection - [ ] create_direct_connect_gateway - [ ] create_direct_connect_gateway_association +- [ ] create_direct_connect_gateway_association_proposal - [ ] create_interconnect - [ ] create_lag - [ ] create_private_virtual_interface - [ ] create_public_virtual_interface +- [ ] create_transit_virtual_interface - [ ] delete_bgp_peer - [ ] delete_connection - [ ] delete_direct_connect_gateway - [ ] delete_direct_connect_gateway_association +- [ ] delete_direct_connect_gateway_association_proposal - [ ] delete_interconnect - [ ] delete_lag - [ ] delete_virtual_interface - [ ] describe_connection_loa - [ ] describe_connections - [ ] describe_connections_on_interconnect +- [ ] describe_direct_connect_gateway_association_proposals - [ ] describe_direct_connect_gateway_associations - [ ] describe_direct_connect_gateway_attachments - [ ] describe_direct_connect_gateways @@ -1162,31 +1721,50 @@ - [ ] disassociate_connection_from_lag - [ ] tag_resource - [ ] untag_resource +- [ ] update_direct_connect_gateway_association - [ ] update_lag +- [ ] update_virtual_interface_attributes -## discovery - 0% implemented +## discovery +0% implemented - [ ] associate_configuration_items_to_application +- [ ] batch_delete_import_data - [ ] create_application - [ ] create_tags - [ ] delete_applications - [ ] delete_tags - [ ] describe_agents - [ ] describe_configurations +- [ ] describe_continuous_exports - [ ] describe_export_configurations - [ ] describe_export_tasks +- [ ] describe_import_tasks - [ ] describe_tags - [ ] disassociate_configuration_items_from_application - [ ] export_configurations - [ ] get_discovery_summary - [ ] list_configurations - [ ] list_server_neighbors +- [ ] start_continuous_export - [ ] start_data_collection_by_agent_ids - [ ] start_export_task +- [ ] start_import_task +- [ ] stop_continuous_export - [ ] stop_data_collection_by_agent_ids - [ ] update_application -## dms - 0% implemented +## dlm +0% implemented +- [ ] create_lifecycle_policy +- [ ] delete_lifecycle_policy +- [ ] get_lifecycle_policies +- [ ] get_lifecycle_policy +- [ ] update_lifecycle_policy + +## dms +0% implemented - [ ] add_tags_to_resource +- [ ] apply_pending_maintenance_action - [ ] create_endpoint - [ ] create_event_subscription - [ ] create_replication_instance @@ -1207,6 +1785,7 @@ - [ ] describe_event_subscriptions - [ ] describe_events - [ ] describe_orderable_replication_instances +- [ ] describe_pending_maintenance_actions - [ ] describe_refresh_schemas_status - [ ] describe_replication_instance_task_logs - [ ] describe_replication_instances @@ -1231,7 +1810,53 @@ - [ ] stop_replication_task - [ ] test_connection -## ds - 0% implemented +## docdb +0% implemented +- [ ] add_tags_to_resource +- [ ] apply_pending_maintenance_action +- [ ] copy_db_cluster_parameter_group +- [ ] copy_db_cluster_snapshot +- [ ] create_db_cluster +- [ ] create_db_cluster_parameter_group +- [ ] create_db_cluster_snapshot +- [ ] create_db_instance +- [ ] 
create_db_subnet_group +- [ ] delete_db_cluster +- [ ] delete_db_cluster_parameter_group +- [ ] delete_db_cluster_snapshot +- [ ] delete_db_instance +- [ ] delete_db_subnet_group +- [ ] describe_db_cluster_parameter_groups +- [ ] describe_db_cluster_parameters +- [ ] describe_db_cluster_snapshot_attributes +- [ ] describe_db_cluster_snapshots +- [ ] describe_db_clusters +- [ ] describe_db_engine_versions +- [ ] describe_db_instances +- [ ] describe_db_subnet_groups +- [ ] describe_engine_default_cluster_parameters +- [ ] describe_event_categories +- [ ] describe_events +- [ ] describe_orderable_db_instance_options +- [ ] describe_pending_maintenance_actions +- [ ] failover_db_cluster +- [ ] list_tags_for_resource +- [ ] modify_db_cluster +- [ ] modify_db_cluster_parameter_group +- [ ] modify_db_cluster_snapshot_attribute +- [ ] modify_db_instance +- [ ] modify_db_subnet_group +- [ ] reboot_db_instance +- [ ] remove_tags_from_resource +- [ ] reset_db_cluster_parameter_group +- [ ] restore_db_cluster_from_snapshot +- [ ] restore_db_cluster_to_point_in_time +- [ ] start_db_cluster +- [ ] stop_db_cluster + +## ds +0% implemented +- [ ] accept_shared_directory - [ ] add_ip_routes - [ ] add_tags_to_resource - [ ] cancel_schema_extension @@ -1240,11 +1865,13 @@ - [ ] create_computer - [ ] create_conditional_forwarder - [ ] create_directory +- [ ] create_log_subscription - [ ] create_microsoft_ad - [ ] create_snapshot - [ ] create_trust - [ ] delete_conditional_forwarder - [ ] delete_directory +- [ ] delete_log_subscription - [ ] delete_snapshot - [ ] delete_trust - [ ] deregister_event_topic @@ -1252,6 +1879,7 @@ - [ ] describe_directories - [ ] describe_domain_controllers - [ ] describe_event_topics +- [ ] describe_shared_directories - [ ] describe_snapshots - [ ] describe_trusts - [ ] disable_radius @@ -1261,19 +1889,26 @@ - [ ] get_directory_limits - [ ] get_snapshot_limits - [ ] list_ip_routes +- [ ] list_log_subscriptions - [ ] list_schema_extensions - [ ] list_tags_for_resource - [ ] register_event_topic +- [ ] reject_shared_directory - [ ] remove_ip_routes - [ ] remove_tags_from_resource +- [ ] reset_user_password - [ ] restore_from_snapshot +- [ ] share_directory - [ ] start_schema_extension +- [ ] unshare_directory - [ ] update_conditional_forwarder - [ ] update_number_of_domain_controllers - [ ] update_radius +- [ ] update_trust - [ ] verify_trust -## dynamodb - 22% implemented +## dynamodb +19% implemented - [ ] batch_get_item - [ ] batch_write_item - [ ] create_backup @@ -1284,7 +1919,9 @@ - [X] delete_table - [ ] describe_backup - [ ] describe_continuous_backups +- [ ] describe_endpoints - [ ] describe_global_table +- [ ] describe_global_table_settings - [ ] describe_limits - [ ] describe_table - [ ] describe_time_to_live @@ -1299,42 +1936,54 @@ - [ ] restore_table_to_point_in_time - [X] scan - [ ] tag_resource +- [ ] transact_get_items +- [ ] transact_write_items - [ ] untag_resource - [ ] update_continuous_backups - [ ] update_global_table +- [ ] update_global_table_settings - [ ] update_item - [ ] update_table - [ ] update_time_to_live -## dynamodbstreams - 0% implemented -- [ ] describe_stream -- [ ] get_records -- [ ] get_shard_iterator -- [ ] list_streams +## dynamodbstreams +100% implemented +- [X] describe_stream +- [X] get_records +- [X] get_shard_iterator +- [X] list_streams -## ec2 - 37% implemented +## ec2 +28% implemented - [ ] accept_reserved_instances_exchange_quote +- [ ] accept_transit_gateway_vpc_attachment - [ ] accept_vpc_endpoint_connections - [X] 
accept_vpc_peering_connection +- [ ] advertise_byoip_cidr - [X] allocate_address - [ ] allocate_hosts +- [ ] apply_security_groups_to_client_vpn_target_network - [ ] assign_ipv6_addresses - [ ] assign_private_ip_addresses - [X] associate_address +- [ ] associate_client_vpn_target_network - [X] associate_dhcp_options - [ ] associate_iam_instance_profile - [X] associate_route_table - [ ] associate_subnet_cidr_block +- [ ] associate_transit_gateway_route_table - [X] associate_vpc_cidr_block - [ ] attach_classic_link_vpc - [X] attach_internet_gateway - [X] attach_network_interface - [X] attach_volume - [X] attach_vpn_gateway +- [ ] authorize_client_vpn_ingress - [X] authorize_security_group_egress - [X] authorize_security_group_ingress - [ ] bundle_instance - [ ] cancel_bundle_task +- [ ] cancel_capacity_reservation - [ ] cancel_conversion_task - [ ] cancel_export_task - [ ] cancel_import_task @@ -1345,18 +1994,22 @@ - [ ] copy_fpga_image - [X] copy_image - [X] copy_snapshot +- [ ] create_capacity_reservation +- [ ] create_client_vpn_endpoint +- [ ] create_client_vpn_route - [X] create_customer_gateway - [ ] create_default_subnet - [ ] create_default_vpc - [X] create_dhcp_options - [ ] create_egress_only_internet_gateway +- [ ] create_fleet - [ ] create_flow_logs - [ ] create_fpga_image - [X] create_image - [ ] create_instance_export_task - [X] create_internet_gateway - [X] create_key_pair -- [ ] create_launch_template +- [X] create_launch_template - [ ] create_launch_template_version - [X] create_nat_gateway - [X] create_network_acl @@ -1369,9 +2022,18 @@ - [X] create_route_table - [X] create_security_group - [X] create_snapshot +- [ ] create_snapshots - [ ] create_spot_datafeed_subscription - [X] create_subnet - [X] create_tags +- [ ] create_traffic_mirror_filter +- [ ] create_traffic_mirror_filter_rule +- [ ] create_traffic_mirror_session +- [ ] create_traffic_mirror_target +- [ ] create_transit_gateway +- [ ] create_transit_gateway_route +- [ ] create_transit_gateway_route_table +- [ ] create_transit_gateway_vpc_attachment - [X] create_volume - [X] create_vpc - [ ] create_vpc_endpoint @@ -1381,9 +2043,12 @@ - [X] create_vpn_connection - [ ] create_vpn_connection_route - [X] create_vpn_gateway +- [ ] delete_client_vpn_endpoint +- [ ] delete_client_vpn_route - [X] delete_customer_gateway - [ ] delete_dhcp_options - [ ] delete_egress_only_internet_gateway +- [ ] delete_fleets - [ ] delete_flow_logs - [ ] delete_fpga_image - [X] delete_internet_gateway @@ -1403,6 +2068,14 @@ - [ ] delete_spot_datafeed_subscription - [X] delete_subnet - [X] delete_tags +- [ ] delete_traffic_mirror_filter +- [ ] delete_traffic_mirror_filter_rule +- [ ] delete_traffic_mirror_session +- [ ] delete_traffic_mirror_target +- [ ] delete_transit_gateway +- [ ] delete_transit_gateway_route +- [ ] delete_transit_gateway_route_table +- [ ] delete_transit_gateway_vpc_attachment - [X] delete_volume - [X] delete_vpc - [ ] delete_vpc_endpoint_connection_notifications @@ -1412,19 +2085,30 @@ - [X] delete_vpn_connection - [ ] delete_vpn_connection_route - [X] delete_vpn_gateway +- [ ] deprovision_byoip_cidr - [X] deregister_image - [ ] describe_account_attributes - [X] describe_addresses - [ ] describe_aggregate_id_format - [X] describe_availability_zones - [ ] describe_bundle_tasks +- [ ] describe_byoip_cidrs +- [ ] describe_capacity_reservations - [ ] describe_classic_link_instances +- [ ] describe_client_vpn_authorization_rules +- [ ] describe_client_vpn_connections +- [ ] describe_client_vpn_endpoints +- [ ] 
describe_client_vpn_routes +- [ ] describe_client_vpn_target_networks - [ ] describe_conversion_tasks - [ ] describe_customer_gateways - [X] describe_dhcp_options - [ ] describe_egress_only_internet_gateways - [ ] describe_elastic_gpus - [ ] describe_export_tasks +- [ ] describe_fleet_history +- [ ] describe_fleet_instances +- [ ] describe_fleets - [ ] describe_flow_logs - [ ] describe_fpga_image_attribute - [ ] describe_fpga_images @@ -1455,6 +2139,7 @@ - [ ] describe_placement_groups - [ ] describe_prefix_lists - [ ] describe_principal_id_format +- [ ] describe_public_ipv4_pools - [X] describe_regions - [ ] describe_reserved_instances - [ ] describe_reserved_instances_listings @@ -1474,8 +2159,15 @@ - [X] describe_spot_instance_requests - [ ] describe_spot_price_history - [ ] describe_stale_security_groups -- [X] describe_subnets +- [ ] describe_subnets - [X] describe_tags +- [ ] describe_traffic_mirror_filters +- [ ] describe_traffic_mirror_sessions +- [ ] describe_traffic_mirror_targets +- [ ] describe_transit_gateway_attachments +- [ ] describe_transit_gateway_route_tables +- [ ] describe_transit_gateway_vpc_attachments +- [ ] describe_transit_gateways - [ ] describe_volume_attribute - [ ] describe_volume_status - [X] describe_volumes @@ -1498,36 +2190,58 @@ - [X] detach_network_interface - [X] detach_volume - [X] detach_vpn_gateway +- [ ] disable_ebs_encryption_by_default +- [ ] disable_transit_gateway_route_table_propagation - [ ] disable_vgw_route_propagation - [ ] disable_vpc_classic_link - [ ] disable_vpc_classic_link_dns_support - [X] disassociate_address +- [ ] disassociate_client_vpn_target_network - [ ] disassociate_iam_instance_profile - [X] disassociate_route_table - [ ] disassociate_subnet_cidr_block +- [ ] disassociate_transit_gateway_route_table - [X] disassociate_vpc_cidr_block +- [ ] enable_ebs_encryption_by_default +- [ ] enable_transit_gateway_route_table_propagation - [ ] enable_vgw_route_propagation - [ ] enable_volume_io - [ ] enable_vpc_classic_link - [ ] enable_vpc_classic_link_dns_support +- [ ] export_client_vpn_client_certificate_revocation_list +- [ ] export_client_vpn_client_configuration +- [ ] export_transit_gateway_routes +- [ ] get_capacity_reservation_usage - [ ] get_console_output - [ ] get_console_screenshot +- [ ] get_ebs_default_kms_key_id +- [ ] get_ebs_encryption_by_default - [ ] get_host_reservation_purchase_preview - [ ] get_launch_template_data - [ ] get_password_data - [ ] get_reserved_instances_exchange_quote +- [ ] get_transit_gateway_attachment_propagations +- [ ] get_transit_gateway_route_table_associations +- [ ] get_transit_gateway_route_table_propagations +- [ ] import_client_vpn_client_certificate_revocation_list - [ ] import_image - [ ] import_instance - [X] import_key_pair - [ ] import_snapshot - [ ] import_volume +- [ ] modify_capacity_reservation +- [ ] modify_client_vpn_endpoint +- [ ] modify_ebs_default_kms_key_id +- [ ] modify_fleet - [ ] modify_fpga_image_attribute - [ ] modify_hosts - [ ] modify_id_format - [ ] modify_identity_id_format - [ ] modify_image_attribute - [X] modify_instance_attribute +- [ ] modify_instance_capacity_reservation_attributes - [ ] modify_instance_credit_specification +- [ ] modify_instance_event_start_time - [ ] modify_instance_placement - [ ] modify_launch_template - [X] modify_network_interface_attribute @@ -1535,6 +2249,10 @@ - [ ] modify_snapshot_attribute - [X] modify_spot_fleet_request - [X] modify_subnet_attribute +- [ ] modify_traffic_mirror_filter_network_services +- [ ] 
modify_traffic_mirror_filter_rule +- [ ] modify_traffic_mirror_session +- [ ] modify_transit_gateway_vpc_attachment - [ ] modify_volume - [ ] modify_volume_attribute - [X] modify_vpc_attribute @@ -1544,13 +2262,16 @@ - [ ] modify_vpc_endpoint_service_permissions - [ ] modify_vpc_peering_connection_options - [ ] modify_vpc_tenancy +- [ ] modify_vpn_connection - [ ] monitor_instances - [ ] move_address_to_vpc +- [ ] provision_byoip_cidr - [ ] purchase_host_reservation - [ ] purchase_reserved_instances_offering - [ ] purchase_scheduled_instances - [X] reboot_instances - [ ] register_image +- [ ] reject_transit_gateway_vpc_attachment - [ ] reject_vpc_endpoint_connections - [X] reject_vpc_peering_connection - [X] release_address @@ -1560,29 +2281,40 @@ - [X] replace_network_acl_entry - [X] replace_route - [X] replace_route_table_association +- [ ] replace_transit_gateway_route - [ ] report_instance_status - [X] request_spot_fleet - [X] request_spot_instances +- [ ] reset_ebs_default_kms_key_id - [ ] reset_fpga_image_attribute - [ ] reset_image_attribute - [ ] reset_instance_attribute - [ ] reset_network_interface_attribute - [ ] reset_snapshot_attribute - [ ] restore_address_to_classic +- [ ] revoke_client_vpn_ingress - [X] revoke_security_group_egress - [X] revoke_security_group_ingress - [ ] run_instances - [ ] run_scheduled_instances +- [ ] search_transit_gateway_routes - [X] start_instances - [X] stop_instances +- [ ] terminate_client_vpn_connections - [X] terminate_instances - [ ] unassign_ipv6_addresses - [ ] unassign_private_ip_addresses - [ ] unmonitor_instances - [ ] update_security_group_rule_descriptions_egress - [ ] update_security_group_rule_descriptions_ingress +- [ ] withdraw_byoip_cidr -## ecr - 36% implemented +## ec2-instance-connect +0% implemented +- [ ] send_ssh_public_key + +## ecr +30% implemented - [ ] batch_check_layer_availability - [X] batch_delete_image - [X] batch_get_image @@ -1600,46 +2332,65 @@ - [ ] get_repository_policy - [ ] initiate_layer_upload - [X] list_images +- [ ] list_tags_for_resource - [X] put_image +- [ ] put_image_tag_mutability - [ ] put_lifecycle_policy - [ ] set_repository_policy - [ ] start_lifecycle_policy_preview +- [ ] tag_resource +- [ ] untag_resource - [ ] upload_layer_part -## ecs - 87% implemented +## ecs +63% implemented - [X] create_cluster - [X] create_service +- [ ] create_task_set +- [ ] delete_account_setting - [X] delete_attributes - [X] delete_cluster - [X] delete_service +- [ ] delete_task_set - [X] deregister_container_instance - [X] deregister_task_definition - [X] describe_clusters - [X] describe_container_instances - [X] describe_services - [X] describe_task_definition +- [ ] describe_task_sets - [X] describe_tasks - [ ] discover_poll_endpoint +- [ ] list_account_settings - [X] list_attributes - [X] list_clusters - [X] list_container_instances - [X] list_services +- [X] list_tags_for_resource - [X] list_task_definition_families - [X] list_task_definitions - [X] list_tasks +- [ ] put_account_setting +- [ ] put_account_setting_default - [X] put_attributes - [X] register_container_instance - [X] register_task_definition - [X] run_task - [X] start_task - [X] stop_task +- [ ] submit_attachment_state_changes - [ ] submit_container_state_change - [ ] submit_task_state_change +- [ ] tag_resource +- [ ] untag_resource - [ ] update_container_agent - [X] update_container_instances_state - [X] update_service +- [ ] update_service_primary_task_set +- [ ] update_task_set -## efs - 0% implemented +## efs +0% implemented - [ ] 
create_file_system - [ ] create_mount_target - [ ] create_tags @@ -1647,14 +2398,31 @@ - [ ] delete_mount_target - [ ] delete_tags - [ ] describe_file_systems +- [ ] describe_lifecycle_configuration - [ ] describe_mount_target_security_groups - [ ] describe_mount_targets - [ ] describe_tags - [ ] modify_mount_target_security_groups +- [ ] put_lifecycle_configuration +- [ ] update_file_system -## elasticache - 0% implemented +## eks +0% implemented +- [ ] create_cluster +- [ ] delete_cluster +- [ ] describe_cluster +- [ ] describe_update +- [ ] list_clusters +- [ ] list_updates +- [ ] update_cluster_config +- [ ] update_cluster_version + +## elasticache +0% implemented - [ ] add_tags_to_resource - [ ] authorize_cache_security_group_ingress +- [ ] batch_apply_update_action +- [ ] batch_stop_update_action - [ ] copy_snapshot - [ ] create_cache_cluster - [ ] create_cache_parameter_group @@ -1662,6 +2430,7 @@ - [ ] create_cache_subnet_group - [ ] create_replication_group - [ ] create_snapshot +- [ ] decrease_replica_count - [ ] delete_cache_cluster - [ ] delete_cache_parameter_group - [ ] delete_cache_security_group @@ -1679,7 +2448,10 @@ - [ ] describe_replication_groups - [ ] describe_reserved_cache_nodes - [ ] describe_reserved_cache_nodes_offerings +- [ ] describe_service_updates - [ ] describe_snapshots +- [ ] describe_update_actions +- [ ] increase_replica_count - [ ] list_allowed_node_type_modifications - [ ] list_tags_for_resource - [ ] modify_cache_cluster @@ -1694,7 +2466,8 @@ - [ ] revoke_cache_security_group_ingress - [ ] test_failover -## elasticbeanstalk - 0% implemented +## elasticbeanstalk +0% implemented - [ ] abort_environment_update - [ ] apply_environment_managed_action - [ ] check_dns_availability @@ -1740,7 +2513,8 @@ - [ ] update_tags_for_resource - [ ] validate_configuration_settings -## elastictranscoder - 0% implemented +## elastictranscoder +0% implemented - [ ] cancel_job - [ ] create_job - [ ] create_pipeline @@ -1759,7 +2533,8 @@ - [ ] update_pipeline_notifications - [ ] update_pipeline_status -## elb - 34% implemented +## elb +34% implemented - [ ] add_tags - [X] apply_security_groups_to_load_balancer - [ ] attach_load_balancer_to_subnets @@ -1790,7 +2565,8 @@ - [ ] set_load_balancer_policies_for_backend_server - [X] set_load_balancer_policies_of_listener -## elbv2 - 70% implemented +## elbv2 +70% implemented - [ ] add_listener_certificates - [ ] add_tags - [X] create_listener @@ -1826,7 +2602,8 @@ - [X] set_security_groups - [X] set_subnets -## emr - 55% implemented +## emr +55% implemented - [ ] add_instance_fleet - [X] add_instance_groups - [X] add_job_flow_steps @@ -1855,8 +2632,10 @@ - [X] set_visible_to_all_users - [X] terminate_job_flows -## es - 0% implemented +## es +0% implemented - [ ] add_tags +- [ ] cancel_elasticsearch_service_software_update - [ ] create_elasticsearch_domain - [ ] delete_elasticsearch_domain - [ ] delete_elasticsearch_service_role @@ -1864,40 +2643,72 @@ - [ ] describe_elasticsearch_domain_config - [ ] describe_elasticsearch_domains - [ ] describe_elasticsearch_instance_type_limits +- [ ] describe_reserved_elasticsearch_instance_offerings +- [ ] describe_reserved_elasticsearch_instances +- [ ] get_compatible_elasticsearch_versions +- [ ] get_upgrade_history +- [ ] get_upgrade_status - [ ] list_domain_names - [ ] list_elasticsearch_instance_types - [ ] list_elasticsearch_versions - [ ] list_tags +- [ ] purchase_reserved_elasticsearch_instance_offering - [ ] remove_tags +- [ ] start_elasticsearch_service_software_update - [ ] 
update_elasticsearch_domain_config +- [ ] upgrade_elasticsearch_domain -## events - 100% implemented +## events +48% implemented +- [ ] activate_event_source +- [ ] create_event_bus +- [ ] create_partner_event_source +- [ ] deactivate_event_source +- [ ] delete_event_bus +- [ ] delete_partner_event_source - [X] delete_rule - [X] describe_event_bus +- [ ] describe_event_source +- [ ] describe_partner_event_source - [X] describe_rule - [X] disable_rule - [X] enable_rule +- [ ] list_event_buses +- [ ] list_event_sources +- [ ] list_partner_event_source_accounts +- [ ] list_partner_event_sources - [X] list_rule_names_by_target - [X] list_rules +- [ ] list_tags_for_resource - [X] list_targets_by_rule - [X] put_events +- [ ] put_partner_events - [X] put_permission - [X] put_rule - [X] put_targets - [X] remove_permission - [X] remove_targets +- [ ] tag_resource - [X] test_event_pattern +- [ ] untag_resource -## firehose - 0% implemented +## firehose +0% implemented - [ ] create_delivery_stream - [ ] delete_delivery_stream - [ ] describe_delivery_stream - [ ] list_delivery_streams +- [ ] list_tags_for_delivery_stream - [ ] put_record - [ ] put_record_batch +- [ ] start_delivery_stream_encryption +- [ ] stop_delivery_stream_encryption +- [ ] tag_delivery_stream +- [ ] untag_delivery_stream - [ ] update_destination -## fms - 0% implemented +## fms +0% implemented - [ ] associate_admin_account - [ ] delete_notification_channel - [ ] delete_policy @@ -1906,12 +2717,29 @@ - [ ] get_compliance_detail - [ ] get_notification_channel - [ ] get_policy +- [ ] get_protection_status - [ ] list_compliance_status +- [ ] list_member_accounts - [ ] list_policies - [ ] put_notification_channel - [ ] put_policy -## gamelift - 0% implemented +## fsx +0% implemented +- [ ] create_backup +- [ ] create_file_system +- [ ] create_file_system_from_backup +- [ ] delete_backup +- [ ] delete_file_system +- [ ] describe_backups +- [ ] describe_file_systems +- [ ] list_tags_for_resource +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_file_system + +## gamelift +0% implemented - [ ] accept_match - [ ] create_alias - [ ] create_build @@ -1922,6 +2750,7 @@ - [ ] create_matchmaking_rule_set - [ ] create_player_session - [ ] create_player_sessions +- [ ] create_script - [ ] create_vpc_peering_authorization - [ ] create_vpc_peering_connection - [ ] delete_alias @@ -1929,7 +2758,9 @@ - [ ] delete_fleet - [ ] delete_game_session_queue - [ ] delete_matchmaking_configuration +- [ ] delete_matchmaking_rule_set - [ ] delete_scaling_policy +- [ ] delete_script - [ ] delete_vpc_peering_authorization - [ ] delete_vpc_peering_connection - [ ] describe_alias @@ -1951,6 +2782,7 @@ - [ ] describe_player_sessions - [ ] describe_runtime_configuration - [ ] describe_scaling_policies +- [ ] describe_script - [ ] describe_vpc_peering_authorizations - [ ] describe_vpc_peering_connections - [ ] get_game_session_log_url @@ -1958,13 +2790,16 @@ - [ ] list_aliases - [ ] list_builds - [ ] list_fleets +- [ ] list_scripts - [ ] put_scaling_policy - [ ] request_upload_credentials - [ ] resolve_alias - [ ] search_game_sessions +- [ ] start_fleet_actions - [ ] start_game_session_placement - [ ] start_match_backfill - [ ] start_matchmaking +- [ ] stop_fleet_actions - [ ] stop_game_session_placement - [ ] stop_matchmaking - [ ] update_alias @@ -1976,9 +2811,11 @@ - [ ] update_game_session_queue - [ ] update_matchmaking_configuration - [ ] update_runtime_configuration +- [ ] update_script - [ ] validate_matchmaking_rule_set -## glacier - 12% implemented +## 
glacier +12% implemented - [ ] abort_multipart_upload - [ ] abort_vault_lock - [ ] add_tags_to_vault @@ -2013,36 +2850,67 @@ - [ ] upload_archive - [ ] upload_multipart_part -## glue - 23% implemented -- [x] batch_create_partition +## globalaccelerator +0% implemented +- [ ] create_accelerator +- [ ] create_endpoint_group +- [ ] create_listener +- [ ] delete_accelerator +- [ ] delete_endpoint_group +- [ ] delete_listener +- [ ] describe_accelerator +- [ ] describe_accelerator_attributes +- [ ] describe_endpoint_group +- [ ] describe_listener +- [ ] list_accelerators +- [ ] list_endpoint_groups +- [ ] list_listeners +- [ ] update_accelerator +- [ ] update_accelerator_attributes +- [ ] update_endpoint_group +- [ ] update_listener + +## glue +5% implemented +- [ ] batch_create_partition - [ ] batch_delete_connection -- [x] batch_delete_partition -- [x] batch_delete_table +- [ ] batch_delete_partition +- [ ] batch_delete_table - [ ] batch_delete_table_version +- [ ] batch_get_crawlers +- [ ] batch_get_dev_endpoints +- [ ] batch_get_jobs - [ ] batch_get_partition +- [ ] batch_get_triggers +- [ ] batch_get_workflows - [ ] batch_stop_job_run - [ ] create_classifier - [ ] create_connection - [ ] create_crawler -- [x] create_database +- [X] create_database - [ ] create_dev_endpoint - [ ] create_job -- [x] create_partition +- [ ] create_partition - [ ] create_script -- [x] create_table +- [ ] create_security_configuration +- [X] create_table - [ ] create_trigger - [ ] create_user_defined_function +- [ ] create_workflow - [ ] delete_classifier - [ ] delete_connection - [ ] delete_crawler - [ ] delete_database - [ ] delete_dev_endpoint - [ ] delete_job -- [x] delete_partition -- [x] delete_table +- [ ] delete_partition +- [ ] delete_resource_policy +- [ ] delete_security_configuration +- [X] delete_table - [ ] delete_table_version - [ ] delete_trigger - [ ] delete_user_defined_function +- [ ] delete_workflow - [ ] get_catalog_import_status - [ ] get_classifier - [ ] get_classifiers @@ -2051,36 +2919,58 @@ - [ ] get_crawler - [ ] get_crawler_metrics - [ ] get_crawlers -- [x] get_database +- [ ] get_data_catalog_encryption_settings +- [X] get_database - [ ] get_databases - [ ] get_dataflow_graph - [ ] get_dev_endpoint - [ ] get_dev_endpoints - [ ] get_job +- [ ] get_job_bookmark +- [ ] get_job_bookmarks - [ ] get_job_run - [ ] get_job_runs - [ ] get_jobs - [ ] get_mapping -- [x] get_partition -- [x] get_partitions +- [ ] get_partition +- [ ] get_partitions - [ ] get_plan -- [x] get_table -- [x] get_table_version -- [x] get_table_versions -- [x] get_tables +- [ ] get_resource_policy +- [ ] get_security_configuration +- [ ] get_security_configurations +- [X] get_table +- [ ] get_table_version +- [ ] get_table_versions +- [X] get_tables +- [ ] get_tags - [ ] get_trigger - [ ] get_triggers - [ ] get_user_defined_function - [ ] get_user_defined_functions +- [ ] get_workflow +- [ ] get_workflow_run +- [ ] get_workflow_run_properties +- [ ] get_workflow_runs - [ ] import_catalog_to_glue +- [ ] list_crawlers +- [ ] list_dev_endpoints +- [ ] list_jobs +- [ ] list_triggers +- [ ] list_workflows +- [ ] put_data_catalog_encryption_settings +- [ ] put_resource_policy +- [ ] put_workflow_run_properties - [ ] reset_job_bookmark - [ ] start_crawler - [ ] start_crawler_schedule - [ ] start_job_run - [ ] start_trigger +- [ ] start_workflow_run - [ ] stop_crawler - [ ] stop_crawler_schedule - [ ] stop_trigger +- [ ] tag_resource +- [ ] untag_resource - [ ] update_classifier - [ ] update_connection - [ ] update_crawler @@ 
-2088,14 +2978,18 @@ - [ ] update_database - [ ] update_dev_endpoint - [ ] update_job -- [x] update_partition -- [x] update_table +- [ ] update_partition +- [ ] update_table - [ ] update_trigger - [ ] update_user_defined_function +- [ ] update_workflow -## greengrass - 0% implemented +## greengrass +0% implemented - [ ] associate_role_to_group - [ ] associate_service_role_to_account +- [ ] create_connector_definition +- [ ] create_connector_definition_version - [ ] create_core_definition - [ ] create_core_definition_version - [ ] create_deployment @@ -2113,6 +3007,7 @@ - [ ] create_software_update_job - [ ] create_subscription_definition - [ ] create_subscription_definition_version +- [ ] delete_connector_definition - [ ] delete_core_definition - [ ] delete_device_definition - [ ] delete_function_definition @@ -2123,7 +3018,10 @@ - [ ] disassociate_role_from_group - [ ] disassociate_service_role_from_account - [ ] get_associated_role +- [ ] get_bulk_deployment_status - [ ] get_connectivity_info +- [ ] get_connector_definition +- [ ] get_connector_definition_version - [ ] get_core_definition - [ ] get_core_definition_version - [ ] get_deployment_status @@ -2142,6 +3040,10 @@ - [ ] get_service_role_for_account - [ ] get_subscription_definition - [ ] get_subscription_definition_version +- [ ] list_bulk_deployment_detailed_reports +- [ ] list_bulk_deployments +- [ ] list_connector_definition_versions +- [ ] list_connector_definitions - [ ] list_core_definition_versions - [ ] list_core_definitions - [ ] list_deployments @@ -2158,8 +3060,14 @@ - [ ] list_resource_definitions - [ ] list_subscription_definition_versions - [ ] list_subscription_definitions +- [ ] list_tags_for_resource - [ ] reset_deployments +- [ ] start_bulk_deployment +- [ ] stop_bulk_deployment +- [ ] tag_resource +- [ ] untag_resource - [ ] update_connectivity_info +- [ ] update_connector_definition - [ ] update_core_definition - [ ] update_device_definition - [ ] update_function_definition @@ -2169,16 +3077,47 @@ - [ ] update_resource_definition - [ ] update_subscription_definition -## guardduty - 0% implemented +## groundstation +0% implemented +- [ ] cancel_contact +- [ ] create_config +- [ ] create_dataflow_endpoint_group +- [ ] create_mission_profile +- [ ] delete_config +- [ ] delete_dataflow_endpoint_group +- [ ] delete_mission_profile +- [ ] describe_contact +- [ ] get_config +- [ ] get_dataflow_endpoint_group +- [ ] get_minute_usage +- [ ] get_mission_profile +- [ ] get_satellite +- [ ] list_configs +- [ ] list_contacts +- [ ] list_dataflow_endpoint_groups +- [ ] list_ground_stations +- [ ] list_mission_profiles +- [ ] list_satellites +- [ ] list_tags_for_resource +- [ ] reserve_contact +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_config +- [ ] update_mission_profile + +## guardduty +0% implemented - [ ] accept_invitation - [ ] archive_findings - [ ] create_detector +- [ ] create_filter - [ ] create_ip_set - [ ] create_members - [ ] create_sample_findings - [ ] create_threat_intel_set - [ ] decline_invitations - [ ] delete_detector +- [ ] delete_filter - [ ] delete_invitations - [ ] delete_ip_set - [ ] delete_members @@ -2186,6 +3125,7 @@ - [ ] disassociate_from_master_account - [ ] disassociate_members - [ ] get_detector +- [ ] get_filter - [ ] get_findings - [ ] get_findings_statistics - [ ] get_invitations_count @@ -2195,20 +3135,26 @@ - [ ] get_threat_intel_set - [ ] invite_members - [ ] list_detectors +- [ ] list_filters - [ ] list_findings - [ ] list_invitations - [ ] list_ip_sets - [ ] list_members 
+- [ ] list_tags_for_resource - [ ] list_threat_intel_sets - [ ] start_monitoring_members - [ ] stop_monitoring_members +- [ ] tag_resource - [ ] unarchive_findings +- [ ] untag_resource - [ ] update_detector +- [ ] update_filter - [ ] update_findings_feedback - [ ] update_ip_set - [ ] update_threat_intel_set -## health - 0% implemented +## health +0% implemented - [ ] describe_affected_entities - [ ] describe_entity_aggregates - [ ] describe_event_aggregates @@ -2216,7 +3162,8 @@ - [ ] describe_event_types - [ ] describe_events -## iam - 62% implemented +## iam +55% implemented - [ ] add_client_id_to_open_id_connect_provider - [X] add_role_to_instance_profile - [X] add_user_to_group @@ -2250,6 +3197,7 @@ - [ ] delete_policy - [X] delete_policy_version - [X] delete_role +- [ ] delete_role_permissions_boundary - [X] delete_role_policy - [X] delete_saml_provider - [X] delete_server_certificate @@ -2258,6 +3206,7 @@ - [X] delete_signing_certificate - [ ] delete_ssh_public_key - [X] delete_user +- [ ] delete_user_permissions_boundary - [X] delete_user_policy - [ ] delete_virtual_mfa_device - [X] detach_group_policy @@ -2265,6 +3214,8 @@ - [X] detach_user_policy - [X] enable_mfa_device - [ ] generate_credential_report +- [ ] generate_organizations_access_report +- [ ] generate_service_last_accessed_details - [X] get_access_key_last_used - [X] get_account_authorization_details - [ ] get_account_password_policy @@ -2277,17 +3228,20 @@ - [X] get_instance_profile - [X] get_login_profile - [ ] get_open_id_connect_provider +- [ ] get_organizations_access_report - [X] get_policy - [X] get_policy_version - [X] get_role - [X] get_role_policy - [X] get_saml_provider - [X] get_server_certificate +- [ ] get_service_last_accessed_details +- [ ] get_service_last_accessed_details_with_entities - [ ] get_service_linked_role_deletion_status - [ ] get_ssh_public_key - [X] get_user - [X] get_user_policy -- [X] list_access_keys +- [ ] list_access_keys - [X] list_account_aliases - [X] list_attached_group_policies - [X] list_attached_role_policies @@ -2295,27 +3249,30 @@ - [ ] list_entities_for_policy - [X] list_group_policies - [X] list_groups -- [X] list_groups_for_user -- [X] list_instance_profiles -- [X] list_instance_profiles_for_role +- [ ] list_groups_for_user +- [ ] list_instance_profiles +- [ ] list_instance_profiles_for_role - [X] list_mfa_devices - [ ] list_open_id_connect_providers - [X] list_policies +- [ ] list_policies_granting_service_access - [X] list_policy_versions - [X] list_role_policies -- [X] list_roles - [X] list_role_tags -- [ ] list_user_tags +- [X] list_roles - [X] list_saml_providers -- [X] list_server_certificates +- [ ] list_server_certificates - [ ] list_service_specific_credentials - [X] list_signing_certificates - [ ] list_ssh_public_keys - [X] list_user_policies +- [ ] list_user_tags - [X] list_users - [ ] list_virtual_mfa_devices - [X] put_group_policy +- [ ] put_role_permissions_boundary - [X] put_role_policy +- [ ] put_user_permissions_boundary - [X] put_user_policy - [ ] remove_client_id_from_open_id_connect_provider - [X] remove_role_from_instance_profile @@ -2323,6 +3280,7 @@ - [ ] reset_service_specific_credential - [ ] resync_mfa_device - [ ] set_default_policy_version +- [ ] set_security_token_service_preferences - [ ] simulate_custom_policy - [ ] simulate_principal_policy - [X] tag_role @@ -2335,19 +3293,20 @@ - [ ] update_group - [X] update_login_profile - [ ] update_open_id_connect_provider_thumbprint -- [ ] update_role -- [ ] update_role_description +- [X] update_role 
+- [X] update_role_description - [X] update_saml_provider - [ ] update_server_certificate - [ ] update_service_specific_credential - [X] update_signing_certificate - [ ] update_ssh_public_key - [X] update_user -- [X] upload_server_certificate +- [ ] upload_server_certificate - [X] upload_signing_certificate - [ ] upload_ssh_public_key -## importexport - 0% implemented +## importexport +0% implemented - [ ] cancel_job - [ ] create_job - [ ] get_shipping_label @@ -2355,10 +3314,12 @@ - [ ] list_jobs - [ ] update_job -## inspector - 0% implemented +## inspector +0% implemented - [ ] add_attributes_to_findings - [ ] create_assessment_target - [ ] create_assessment_template +- [ ] create_exclusions_preview - [ ] create_resource_group - [ ] delete_assessment_run - [ ] delete_assessment_target @@ -2367,16 +3328,19 @@ - [ ] describe_assessment_targets - [ ] describe_assessment_templates - [ ] describe_cross_account_access_role +- [ ] describe_exclusions - [ ] describe_findings - [ ] describe_resource_groups - [ ] describe_rules_packages - [ ] get_assessment_report +- [ ] get_exclusions_preview - [ ] get_telemetry_metadata - [ ] list_assessment_run_agents - [ ] list_assessment_runs - [ ] list_assessment_targets - [ ] list_assessment_templates - [ ] list_event_subscriptions +- [ ] list_exclusions - [ ] list_findings - [ ] list_rules_packages - [ ] list_tags_for_resource @@ -2390,37 +3354,53 @@ - [ ] unsubscribe_from_event - [ ] update_assessment_target -## iot - 33% implemented +## iot +24% implemented - [ ] accept_certificate_transfer +- [ ] add_thing_to_billing_group - [X] add_thing_to_thing_group - [ ] associate_targets_with_job - [X] attach_policy - [X] attach_principal_policy +- [ ] attach_security_profile - [X] attach_thing_principal +- [ ] cancel_audit_task - [ ] cancel_certificate_transfer - [ ] cancel_job +- [ ] cancel_job_execution - [ ] clear_default_authorizer - [ ] create_authorizer +- [ ] create_billing_group - [ ] create_certificate_from_csr +- [ ] create_dynamic_thing_group - [X] create_job - [X] create_keys_and_certificate - [ ] create_ota_update - [X] create_policy - [ ] create_policy_version - [ ] create_role_alias +- [ ] create_scheduled_audit +- [ ] create_security_profile - [ ] create_stream - [X] create_thing - [X] create_thing_group - [X] create_thing_type - [ ] create_topic_rule +- [ ] delete_account_audit_configuration - [ ] delete_authorizer +- [ ] delete_billing_group - [ ] delete_ca_certificate - [X] delete_certificate +- [ ] delete_dynamic_thing_group +- [ ] delete_job +- [ ] delete_job_execution - [ ] delete_ota_update - [X] delete_policy - [ ] delete_policy_version - [ ] delete_registration_code - [ ] delete_role_alias +- [ ] delete_scheduled_audit +- [ ] delete_security_profile - [ ] delete_stream - [X] delete_thing - [X] delete_thing_group @@ -2428,7 +3408,10 @@ - [ ] delete_topic_rule - [ ] delete_v2_logging_level - [ ] deprecate_thing_type +- [ ] describe_account_audit_configuration +- [ ] describe_audit_task - [ ] describe_authorizer +- [ ] describe_billing_group - [ ] describe_ca_certificate - [X] describe_certificate - [ ] describe_default_authorizer @@ -2438,6 +3421,8 @@ - [X] describe_job - [ ] describe_job_execution - [ ] describe_role_alias +- [ ] describe_scheduled_audit +- [ ] describe_security_profile - [ ] describe_stream - [X] describe_thing - [X] describe_thing_group @@ -2445,6 +3430,7 @@ - [X] describe_thing_type - [X] detach_policy - [X] detach_principal_policy +- [ ] detach_security_profile - [X] detach_thing_principal - [ ] disable_topic_rule - [ 
] enable_topic_rule @@ -2456,10 +3442,15 @@ - [X] get_policy - [ ] get_policy_version - [ ] get_registration_code +- [ ] get_statistics - [ ] get_topic_rule - [ ] get_v2_logging_options +- [ ] list_active_violations - [ ] list_attached_policies +- [ ] list_audit_findings +- [ ] list_audit_tasks - [ ] list_authorizers +- [ ] list_billing_groups - [ ] list_ca_certificates - [X] list_certificates - [ ] list_certificates_by_ca @@ -2475,8 +3466,13 @@ - [X] list_principal_policies - [X] list_principal_things - [ ] list_role_aliases +- [ ] list_scheduled_audits +- [ ] list_security_profiles +- [ ] list_security_profiles_for_target - [ ] list_streams +- [ ] list_tags_for_resource - [ ] list_targets_for_policy +- [ ] list_targets_for_security_profile - [X] list_thing_groups - [X] list_thing_groups_for_thing - [X] list_thing_principals @@ -2484,13 +3480,16 @@ - [ ] list_thing_registration_tasks - [X] list_thing_types - [X] list_things +- [ ] list_things_in_billing_group - [X] list_things_in_thing_group - [ ] list_topic_rules - [ ] list_v2_logging_levels +- [ ] list_violation_events - [ ] register_ca_certificate - [X] register_certificate - [ ] register_thing - [ ] reject_certificate_transfer +- [ ] remove_thing_from_billing_group - [X] remove_thing_from_thing_group - [ ] replace_topic_rule - [ ] search_index @@ -2499,41 +3498,213 @@ - [ ] set_logging_options - [ ] set_v2_logging_level - [ ] set_v2_logging_options +- [ ] start_on_demand_audit_task - [ ] start_thing_registration_task - [ ] stop_thing_registration_task +- [ ] tag_resource - [ ] test_authorization - [ ] test_invoke_authorizer - [ ] transfer_certificate +- [ ] untag_resource +- [ ] update_account_audit_configuration - [ ] update_authorizer +- [ ] update_billing_group - [ ] update_ca_certificate - [X] update_certificate +- [ ] update_dynamic_thing_group - [ ] update_event_configurations - [ ] update_indexing_configuration +- [ ] update_job - [ ] update_role_alias +- [ ] update_scheduled_audit +- [ ] update_security_profile - [ ] update_stream - [X] update_thing - [X] update_thing_group - [X] update_thing_groups_for_thing +- [ ] validate_security_profile_behaviors -## iot-data - 0% implemented -- [ ] delete_thing_shadow -- [ ] get_thing_shadow -- [ ] publish -- [ ] update_thing_shadow +## iot-data +100% implemented +- [X] delete_thing_shadow +- [X] get_thing_shadow +- [X] publish +- [X] update_thing_shadow -## iot-jobs-data - 0% implemented +## iot-jobs-data +0% implemented - [ ] describe_job_execution - [ ] get_pending_job_executions - [ ] start_next_pending_job_execution - [ ] update_job_execution -## kinesis - 61% implemented +## iot1click-devices +0% implemented +- [ ] claim_devices_by_claim_code +- [ ] describe_device +- [ ] finalize_device_claim +- [ ] get_device_methods +- [ ] initiate_device_claim +- [ ] invoke_device_method +- [ ] list_device_events +- [ ] list_devices +- [ ] list_tags_for_resource +- [ ] tag_resource +- [ ] unclaim_device +- [ ] untag_resource +- [ ] update_device_state + +## iot1click-projects +0% implemented +- [ ] associate_device_with_placement +- [ ] create_placement +- [ ] create_project +- [ ] delete_placement +- [ ] delete_project +- [ ] describe_placement +- [ ] describe_project +- [ ] disassociate_device_from_placement +- [ ] get_devices_in_placement +- [ ] list_placements +- [ ] list_projects +- [ ] list_tags_for_resource +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_placement +- [ ] update_project + +## iotanalytics +0% implemented +- [ ] batch_put_message +- [ ] cancel_pipeline_reprocessing 
+- [ ] create_channel +- [ ] create_dataset +- [ ] create_dataset_content +- [ ] create_datastore +- [ ] create_pipeline +- [ ] delete_channel +- [ ] delete_dataset +- [ ] delete_dataset_content +- [ ] delete_datastore +- [ ] delete_pipeline +- [ ] describe_channel +- [ ] describe_dataset +- [ ] describe_datastore +- [ ] describe_logging_options +- [ ] describe_pipeline +- [ ] get_dataset_content +- [ ] list_channels +- [ ] list_dataset_contents +- [ ] list_datasets +- [ ] list_datastores +- [ ] list_pipelines +- [ ] list_tags_for_resource +- [ ] put_logging_options +- [ ] run_pipeline_activity +- [ ] sample_channel_data +- [ ] start_pipeline_reprocessing +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_channel +- [ ] update_dataset +- [ ] update_datastore +- [ ] update_pipeline + +## iotevents +0% implemented +- [ ] create_detector_model +- [ ] create_input +- [ ] delete_detector_model +- [ ] delete_input +- [ ] describe_detector_model +- [ ] describe_input +- [ ] describe_logging_options +- [ ] list_detector_model_versions +- [ ] list_detector_models +- [ ] list_inputs +- [ ] list_tags_for_resource +- [ ] put_logging_options +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_detector_model +- [ ] update_input + +## iotevents-data +0% implemented +- [ ] batch_put_message +- [ ] batch_update_detector +- [ ] describe_detector +- [ ] list_detectors + +## iotthingsgraph +0% implemented +- [ ] associate_entity_to_thing +- [ ] create_flow_template +- [ ] create_system_instance +- [ ] create_system_template +- [ ] delete_flow_template +- [ ] delete_namespace +- [ ] delete_system_instance +- [ ] delete_system_template +- [ ] deploy_system_instance +- [ ] deprecate_flow_template +- [ ] deprecate_system_template +- [ ] describe_namespace +- [ ] dissociate_entity_from_thing +- [ ] get_entities +- [ ] get_flow_template +- [ ] get_flow_template_revisions +- [ ] get_namespace_deletion_status +- [ ] get_system_instance +- [ ] get_system_template +- [ ] get_system_template_revisions +- [ ] get_upload_status +- [ ] list_flow_execution_messages +- [ ] list_tags_for_resource +- [ ] search_entities +- [ ] search_flow_executions +- [ ] search_flow_templates +- [ ] search_system_instances +- [ ] search_system_templates +- [ ] search_things +- [ ] tag_resource +- [ ] undeploy_system_instance +- [ ] untag_resource +- [ ] update_flow_template +- [ ] update_system_template +- [ ] upload_entity_definitions + +## kafka +0% implemented +- [ ] create_cluster +- [ ] create_configuration +- [ ] delete_cluster +- [ ] describe_cluster +- [ ] describe_cluster_operation +- [ ] describe_configuration +- [ ] describe_configuration_revision +- [ ] get_bootstrap_brokers +- [ ] list_cluster_operations +- [ ] list_clusters +- [ ] list_configuration_revisions +- [ ] list_configurations +- [ ] list_nodes +- [ ] list_tags_for_resource +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_broker_storage +- [ ] update_cluster_configuration + +## kinesis +50% implemented - [X] add_tags_to_stream - [X] create_stream - [ ] decrease_stream_retention_period - [X] delete_stream +- [ ] deregister_stream_consumer - [ ] describe_limits - [X] describe_stream +- [ ] describe_stream_consumer - [X] describe_stream_summary - [ ] disable_enhanced_monitoring - [ ] enable_enhanced_monitoring @@ -2541,25 +3712,33 @@ - [X] get_shard_iterator - [ ] increase_stream_retention_period - [ ] list_shards +- [ ] list_stream_consumers - [X] list_streams - [X] list_tags_for_stream - [X] merge_shards - [X] put_record - [X] put_records +- [ ] 
register_stream_consumer - [X] remove_tags_from_stream - [X] split_shard - [ ] start_stream_encryption - [ ] stop_stream_encryption +- [ ] subscribe_to_shard - [ ] update_shard_count -## kinesis-video-archived-media - 0% implemented +## kinesis-video-archived-media +0% implemented +- [ ] get_dash_streaming_session_url +- [ ] get_hls_streaming_session_url - [ ] get_media_for_fragment_list - [ ] list_fragments -## kinesis-video-media - 0% implemented +## kinesis-video-media +0% implemented - [ ] get_media -## kinesisanalytics - 0% implemented +## kinesisanalytics +0% implemented - [ ] add_application_cloud_watch_logging_option - [ ] add_application_input - [ ] add_application_input_processing_configuration @@ -2574,11 +3753,42 @@ - [ ] describe_application - [ ] discover_input_schema - [ ] list_applications +- [ ] list_tags_for_resource - [ ] start_application - [ ] stop_application +- [ ] tag_resource +- [ ] untag_resource - [ ] update_application -## kinesisvideo - 0% implemented +## kinesisanalyticsv2 +0% implemented +- [ ] add_application_cloud_watch_logging_option +- [ ] add_application_input +- [ ] add_application_input_processing_configuration +- [ ] add_application_output +- [ ] add_application_reference_data_source +- [ ] create_application +- [ ] create_application_snapshot +- [ ] delete_application +- [ ] delete_application_cloud_watch_logging_option +- [ ] delete_application_input_processing_configuration +- [ ] delete_application_output +- [ ] delete_application_reference_data_source +- [ ] delete_application_snapshot +- [ ] describe_application +- [ ] describe_application_snapshot +- [ ] discover_input_schema +- [ ] list_application_snapshots +- [ ] list_applications +- [ ] list_tags_for_resource +- [ ] start_application +- [ ] stop_application +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_application + +## kinesisvideo +0% implemented - [ ] create_stream - [ ] delete_stream - [ ] describe_stream @@ -2590,21 +3800,27 @@ - [ ] update_data_retention - [ ] update_stream -## kms - 25% implemented -- [ ] cancel_key_deletion +## kms +41% implemented +- [X] cancel_key_deletion +- [ ] connect_custom_key_store - [ ] create_alias +- [ ] create_custom_key_store - [ ] create_grant - [X] create_key - [ ] decrypt - [X] delete_alias +- [ ] delete_custom_key_store - [ ] delete_imported_key_material +- [ ] describe_custom_key_stores - [X] describe_key -- [ ] disable_key +- [X] disable_key - [X] disable_key_rotation -- [ ] enable_key +- [ ] disconnect_custom_key_store +- [X] enable_key - [X] enable_key_rotation - [ ] encrypt -- [ ] generate_data_key +- [X] generate_data_key - [ ] generate_data_key_without_plaintext - [ ] generate_random - [X] get_key_policy @@ -2615,19 +3831,22 @@ - [ ] list_grants - [ ] list_key_policies - [X] list_keys -- [ ] list_resource_tags +- [X] list_resource_tags - [ ] list_retirable_grants - [X] put_key_policy - [ ] re_encrypt - [ ] retire_grant - [ ] revoke_grant -- [ ] schedule_key_deletion -- [ ] tag_resource +- [X] schedule_key_deletion +- [X] tag_resource - [ ] untag_resource - [ ] update_alias -- [ ] update_key_description +- [ ] update_custom_key_store +- [X] update_key_description -## lambda - 0% implemented +## lambda +0% implemented +- [ ] add_layer_version_permission - [ ] add_permission - [ ] create_alias - [ ] create_event_source_mapping @@ -2636,21 +3855,29 @@ - [ ] delete_event_source_mapping - [ ] delete_function - [ ] delete_function_concurrency +- [ ] delete_layer_version - [ ] get_account_settings - [ ] get_alias - [ ] 
get_event_source_mapping - [ ] get_function - [ ] get_function_configuration +- [ ] get_layer_version +- [ ] get_layer_version_by_arn +- [ ] get_layer_version_policy - [ ] get_policy - [ ] invoke - [ ] invoke_async - [ ] list_aliases - [ ] list_event_source_mappings - [ ] list_functions +- [ ] list_layer_versions +- [ ] list_layers - [ ] list_tags - [ ] list_versions_by_function +- [ ] publish_layer_version - [ ] publish_version - [ ] put_function_concurrency +- [ ] remove_layer_version_permission - [ ] remove_permission - [ ] tag_resource - [ ] untag_resource @@ -2659,7 +3886,8 @@ - [ ] update_function_code - [ ] update_function_configuration -## lex-models - 0% implemented +## lex-models +0% implemented - [ ] create_bot_version - [ ] create_intent_version - [ ] create_slot_type_version @@ -2697,17 +3925,39 @@ - [ ] put_slot_type - [ ] start_import -## lex-runtime - 0% implemented +## lex-runtime +0% implemented - [ ] post_content - [ ] post_text -## lightsail - 0% implemented +## license-manager +0% implemented +- [ ] create_license_configuration +- [ ] delete_license_configuration +- [ ] get_license_configuration +- [ ] get_service_settings +- [ ] list_associations_for_license_configuration +- [ ] list_license_configurations +- [ ] list_license_specifications_for_resource +- [ ] list_resource_inventory +- [ ] list_tags_for_resource +- [ ] list_usage_for_license_configuration +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_license_configuration +- [ ] update_license_specifications_for_resource +- [ ] update_service_settings + +## lightsail +0% implemented - [ ] allocate_static_ip - [ ] attach_disk - [ ] attach_instances_to_load_balancer - [ ] attach_load_balancer_tls_certificate - [ ] attach_static_ip - [ ] close_instance_public_ports +- [ ] copy_snapshot +- [ ] create_cloud_formation_stack - [ ] create_disk - [ ] create_disk_from_snapshot - [ ] create_disk_snapshot @@ -2719,6 +3969,9 @@ - [ ] create_key_pair - [ ] create_load_balancer - [ ] create_load_balancer_tls_certificate +- [ ] create_relational_database +- [ ] create_relational_database_from_snapshot +- [ ] create_relational_database_snapshot - [ ] delete_disk - [ ] delete_disk_snapshot - [ ] delete_domain @@ -2726,21 +3979,27 @@ - [ ] delete_instance - [ ] delete_instance_snapshot - [ ] delete_key_pair +- [ ] delete_known_host_keys - [ ] delete_load_balancer - [ ] delete_load_balancer_tls_certificate +- [ ] delete_relational_database +- [ ] delete_relational_database_snapshot - [ ] detach_disk - [ ] detach_instances_from_load_balancer - [ ] detach_static_ip - [ ] download_default_key_pair +- [ ] export_snapshot - [ ] get_active_names - [ ] get_blueprints - [ ] get_bundles +- [ ] get_cloud_formation_stack_records - [ ] get_disk - [ ] get_disk_snapshot - [ ] get_disk_snapshots - [ ] get_disks - [ ] get_domain - [ ] get_domains +- [ ] get_export_snapshot_records - [ ] get_instance - [ ] get_instance_access_details - [ ] get_instance_metric_data @@ -2759,6 +4018,18 @@ - [ ] get_operations - [ ] get_operations_for_resource - [ ] get_regions +- [ ] get_relational_database +- [ ] get_relational_database_blueprints +- [ ] get_relational_database_bundles +- [ ] get_relational_database_events +- [ ] get_relational_database_log_events +- [ ] get_relational_database_log_streams +- [ ] get_relational_database_master_user_password +- [ ] get_relational_database_metric_data +- [ ] get_relational_database_parameters +- [ ] get_relational_database_snapshot +- [ ] get_relational_database_snapshots +- [ ] get_relational_databases - [ ] 
get_static_ip - [ ] get_static_ips - [ ] import_key_pair @@ -2767,14 +4038,22 @@ - [ ] peer_vpc - [ ] put_instance_public_ports - [ ] reboot_instance +- [ ] reboot_relational_database - [ ] release_static_ip - [ ] start_instance +- [ ] start_relational_database - [ ] stop_instance +- [ ] stop_relational_database +- [ ] tag_resource - [ ] unpeer_vpc +- [ ] untag_resource - [ ] update_domain_entry - [ ] update_load_balancer_attribute +- [ ] update_relational_database +- [ ] update_relational_database_parameters -## logs - 27% implemented +## logs +28% implemented - [ ] associate_kms_key - [ ] cancel_export_task - [ ] create_export_task @@ -2785,31 +4064,38 @@ - [X] delete_log_stream - [ ] delete_metric_filter - [ ] delete_resource_policy -- [ ] delete_retention_policy +- [X] delete_retention_policy - [ ] delete_subscription_filter - [ ] describe_destinations - [ ] describe_export_tasks - [X] describe_log_groups - [X] describe_log_streams - [ ] describe_metric_filters +- [ ] describe_queries - [ ] describe_resource_policies - [ ] describe_subscription_filters - [ ] disassociate_kms_key - [X] filter_log_events - [X] get_log_events +- [ ] get_log_group_fields +- [ ] get_log_record +- [ ] get_query_results - [ ] list_tags_log_group - [ ] put_destination - [ ] put_destination_policy - [X] put_log_events - [ ] put_metric_filter - [ ] put_resource_policy -- [ ] put_retention_policy +- [X] put_retention_policy - [ ] put_subscription_filter +- [ ] start_query +- [ ] stop_query - [ ] tag_log_group - [ ] test_metric_filter - [ ] untag_log_group -## machinelearning - 0% implemented +## machinelearning +0% implemented - [ ] add_tags - [ ] create_batch_prediction - [ ] create_data_source_from_rds @@ -2839,14 +4125,69 @@ - [ ] update_evaluation - [ ] update_ml_model -## marketplace-entitlement - 0% implemented +## macie +0% implemented +- [ ] associate_member_account +- [ ] associate_s3_resources +- [ ] disassociate_member_account +- [ ] disassociate_s3_resources +- [ ] list_member_accounts +- [ ] list_s3_resources +- [ ] update_s3_resources + +## managedblockchain +0% implemented +- [ ] create_member +- [ ] create_network +- [ ] create_node +- [ ] create_proposal +- [ ] delete_member +- [ ] delete_node +- [ ] get_member +- [ ] get_network +- [ ] get_node +- [ ] get_proposal +- [ ] list_invitations +- [ ] list_members +- [ ] list_networks +- [ ] list_nodes +- [ ] list_proposal_votes +- [ ] list_proposals +- [ ] reject_invitation +- [ ] vote_on_proposal + +## marketplace-entitlement +0% implemented - [ ] get_entitlements -## marketplacecommerceanalytics - 0% implemented +## marketplacecommerceanalytics +0% implemented - [ ] generate_data_set - [ ] start_support_data_export -## mediaconvert - 0% implemented +## mediaconnect +0% implemented +- [ ] add_flow_outputs +- [ ] create_flow +- [ ] delete_flow +- [ ] describe_flow +- [ ] grant_flow_entitlements +- [ ] list_entitlements +- [ ] list_flows +- [ ] list_tags_for_resource +- [ ] remove_flow_output +- [ ] revoke_flow_entitlement +- [ ] start_flow +- [ ] stop_flow +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_flow_entitlement +- [ ] update_flow_output +- [ ] update_flow_source + +## mediaconvert +0% implemented +- [ ] associate_certificate - [ ] cancel_job - [ ] create_job - [ ] create_job_template @@ -2856,6 +4197,7 @@ - [ ] delete_preset - [ ] delete_queue - [ ] describe_endpoints +- [ ] disassociate_certificate - [ ] get_job - [ ] get_job_template - [ ] get_preset @@ -2864,30 +4206,49 @@ - [ ] list_jobs - [ ] list_presets - [ ] list_queues +- [ ] 
list_tags_for_resource +- [ ] tag_resource +- [ ] untag_resource - [ ] update_job_template - [ ] update_preset - [ ] update_queue -## medialive - 0% implemented +## medialive +0% implemented +- [ ] batch_update_schedule - [ ] create_channel - [ ] create_input - [ ] create_input_security_group +- [ ] create_tags - [ ] delete_channel - [ ] delete_input - [ ] delete_input_security_group +- [ ] delete_reservation +- [ ] delete_schedule +- [ ] delete_tags - [ ] describe_channel - [ ] describe_input - [ ] describe_input_security_group +- [ ] describe_offering +- [ ] describe_reservation +- [ ] describe_schedule - [ ] list_channels - [ ] list_input_security_groups - [ ] list_inputs +- [ ] list_offerings +- [ ] list_reservations +- [ ] list_tags_for_resource +- [ ] purchase_offering - [ ] start_channel - [ ] stop_channel - [ ] update_channel +- [ ] update_channel_class - [ ] update_input - [ ] update_input_security_group +- [ ] update_reservation -## mediapackage - 0% implemented +## mediapackage +0% implemented - [ ] create_channel - [ ] create_origin_endpoint - [ ] delete_channel @@ -2896,35 +4257,77 @@ - [ ] describe_origin_endpoint - [ ] list_channels - [ ] list_origin_endpoints +- [ ] list_tags_for_resource - [ ] rotate_channel_credentials +- [ ] rotate_ingest_endpoint_credentials +- [ ] tag_resource +- [ ] untag_resource - [ ] update_channel - [ ] update_origin_endpoint -## mediastore - 0% implemented +## mediapackage-vod +0% implemented +- [ ] create_asset +- [ ] create_packaging_configuration +- [ ] create_packaging_group +- [ ] delete_asset +- [ ] delete_packaging_configuration +- [ ] delete_packaging_group +- [ ] describe_asset +- [ ] describe_packaging_configuration +- [ ] describe_packaging_group +- [ ] list_assets +- [ ] list_packaging_configurations +- [ ] list_packaging_groups + +## mediastore +0% implemented - [ ] create_container - [ ] delete_container - [ ] delete_container_policy - [ ] delete_cors_policy +- [ ] delete_lifecycle_policy - [ ] describe_container - [ ] get_container_policy - [ ] get_cors_policy +- [ ] get_lifecycle_policy - [ ] list_containers +- [ ] list_tags_for_resource - [ ] put_container_policy - [ ] put_cors_policy +- [ ] put_lifecycle_policy +- [ ] start_access_logging +- [ ] stop_access_logging +- [ ] tag_resource +- [ ] untag_resource -## mediastore-data - 0% implemented +## mediastore-data +0% implemented - [ ] delete_object - [ ] describe_object - [ ] get_object - [ ] list_items - [ ] put_object -## meteringmarketplace - 0% implemented +## mediatailor +0% implemented +- [ ] delete_playback_configuration +- [ ] get_playback_configuration +- [ ] list_playback_configurations +- [ ] list_tags_for_resource +- [ ] put_playback_configuration +- [ ] tag_resource +- [ ] untag_resource + +## meteringmarketplace +0% implemented - [ ] batch_meter_usage - [ ] meter_usage +- [ ] register_usage - [ ] resolve_customer -## mgh - 0% implemented +## mgh +0% implemented - [ ] associate_created_artifact - [ ] associate_discovered_resource - [ ] create_progress_update_stream @@ -2942,7 +4345,8 @@ - [ ] notify_migration_task_state - [ ] put_resource_attributes -## mobile - 0% implemented +## mobile +0% implemented - [ ] create_project - [ ] delete_project - [ ] describe_bundle @@ -2953,26 +4357,33 @@ - [ ] list_projects - [ ] update_project -## mq - 0% implemented +## mq +0% implemented - [ ] create_broker - [ ] create_configuration +- [ ] create_tags - [ ] create_user - [ ] delete_broker +- [ ] delete_tags - [ ] delete_user - [ ] describe_broker +- [ ] 
describe_broker_engine_types +- [ ] describe_broker_instance_options - [ ] describe_configuration - [ ] describe_configuration_revision - [ ] describe_user - [ ] list_brokers - [ ] list_configuration_revisions - [ ] list_configurations +- [ ] list_tags - [ ] list_users - [ ] reboot_broker - [ ] update_broker - [ ] update_configuration - [ ] update_user -## mturk - 0% implemented +## mturk +0% implemented - [ ] accept_qualification_request - [ ] approve_assignment - [ ] associate_qualification_with_worker @@ -3013,7 +4424,68 @@ - [ ] update_notification_settings - [ ] update_qualification_type -## opsworks - 12% implemented +## neptune +0% implemented +- [ ] add_role_to_db_cluster +- [ ] add_source_identifier_to_subscription +- [ ] add_tags_to_resource +- [ ] apply_pending_maintenance_action +- [ ] copy_db_cluster_parameter_group +- [ ] copy_db_cluster_snapshot +- [ ] copy_db_parameter_group +- [ ] create_db_cluster +- [ ] create_db_cluster_parameter_group +- [ ] create_db_cluster_snapshot +- [ ] create_db_instance +- [ ] create_db_parameter_group +- [ ] create_db_subnet_group +- [ ] create_event_subscription +- [ ] delete_db_cluster +- [ ] delete_db_cluster_parameter_group +- [ ] delete_db_cluster_snapshot +- [ ] delete_db_instance +- [ ] delete_db_parameter_group +- [ ] delete_db_subnet_group +- [ ] delete_event_subscription +- [ ] describe_db_cluster_parameter_groups +- [ ] describe_db_cluster_parameters +- [ ] describe_db_cluster_snapshot_attributes +- [ ] describe_db_cluster_snapshots +- [ ] describe_db_clusters +- [ ] describe_db_engine_versions +- [ ] describe_db_instances +- [ ] describe_db_parameter_groups +- [ ] describe_db_parameters +- [ ] describe_db_subnet_groups +- [ ] describe_engine_default_cluster_parameters +- [ ] describe_engine_default_parameters +- [ ] describe_event_categories +- [ ] describe_event_subscriptions +- [ ] describe_events +- [ ] describe_orderable_db_instance_options +- [ ] describe_pending_maintenance_actions +- [ ] describe_valid_db_instance_modifications +- [ ] failover_db_cluster +- [ ] list_tags_for_resource +- [ ] modify_db_cluster +- [ ] modify_db_cluster_parameter_group +- [ ] modify_db_cluster_snapshot_attribute +- [ ] modify_db_instance +- [ ] modify_db_parameter_group +- [ ] modify_db_subnet_group +- [ ] modify_event_subscription +- [ ] promote_read_replica_db_cluster +- [ ] reboot_db_instance +- [ ] remove_role_from_db_cluster +- [ ] remove_source_identifier_from_subscription +- [ ] remove_tags_from_resource +- [ ] reset_db_cluster_parameter_group +- [ ] reset_db_parameter_group +- [ ] restore_db_cluster_from_snapshot +- [ ] restore_db_cluster_to_point_in_time + +## opsworks +12% implemented - [ ] assign_instance - [ ] assign_volume - [ ] associate_elastic_ip @@ -3089,7 +4561,8 @@ - [ ] update_user_profile - [ ] update_volume -## opsworkscm - 0% implemented +## opsworkscm +0% implemented - [ ] associate_node - [ ] create_backup - [ ] create_server @@ -3101,16 +4574,19 @@ - [ ] describe_node_association_status - [ ] describe_servers - [ ] disassociate_node +- [ ] export_server_engine_attribute - [ ] restore_server - [ ] start_maintenance - [ ] update_server - [ ] update_server_engine_attributes -## organizations - 47% implemented +## organizations +41% implemented - [ ] accept_handshake - [X] attach_policy - [ ] cancel_handshake - [X] create_account +- [ ] create_gov_cloud_account - [X] create_organization - [X] create_organizational_unit - [X] create_policy @@ -3144,13 +4620,70 @@ - [X] list_policies - [X] list_policies_for_target - [X] 
list_roots +- [ ] list_tags_for_resource - [X] list_targets_for_policy - [X] move_account - [ ] remove_account_from_organization +- [ ] tag_resource +- [ ] untag_resource - [ ] update_organizational_unit - [ ] update_policy -## pinpoint - 0% implemented +## personalize +0% implemented +- [ ] create_campaign +- [ ] create_dataset +- [ ] create_dataset_group +- [ ] create_dataset_import_job +- [ ] create_event_tracker +- [ ] create_schema +- [ ] create_solution +- [ ] create_solution_version +- [ ] delete_campaign +- [ ] delete_dataset +- [ ] delete_dataset_group +- [ ] delete_event_tracker +- [ ] delete_schema +- [ ] delete_solution +- [ ] describe_algorithm +- [ ] describe_campaign +- [ ] describe_dataset +- [ ] describe_dataset_group +- [ ] describe_dataset_import_job +- [ ] describe_event_tracker +- [ ] describe_feature_transformation +- [ ] describe_recipe +- [ ] describe_schema +- [ ] describe_solution +- [ ] describe_solution_version +- [ ] get_solution_metrics +- [ ] list_campaigns +- [ ] list_dataset_groups +- [ ] list_dataset_import_jobs +- [ ] list_datasets +- [ ] list_event_trackers +- [ ] list_recipes +- [ ] list_schemas +- [ ] list_solution_versions +- [ ] list_solutions +- [ ] update_campaign + +## personalize-events +0% implemented +- [ ] put_events + +## personalize-runtime +0% implemented +- [ ] get_personalized_ranking +- [ ] get_recommendations + +## pi +0% implemented +- [ ] describe_dimension_keys +- [ ] get_resource_metrics + +## pinpoint +0% implemented - [ ] create_app - [ ] create_campaign - [ ] create_export_job @@ -3170,20 +4703,25 @@ - [ ] delete_gcm_channel - [ ] delete_segment - [ ] delete_sms_channel +- [ ] delete_user_endpoints +- [ ] delete_voice_channel - [ ] get_adm_channel - [ ] get_apns_channel - [ ] get_apns_sandbox_channel - [ ] get_apns_voip_channel - [ ] get_apns_voip_sandbox_channel - [ ] get_app +- [ ] get_application_date_range_kpi - [ ] get_application_settings - [ ] get_apps - [ ] get_baidu_channel - [ ] get_campaign - [ ] get_campaign_activities +- [ ] get_campaign_date_range_kpi - [ ] get_campaign_version - [ ] get_campaign_versions - [ ] get_campaigns +- [ ] get_channels - [ ] get_email_channel - [ ] get_endpoint - [ ] get_event_stream @@ -3199,9 +4737,17 @@ - [ ] get_segment_versions - [ ] get_segments - [ ] get_sms_channel +- [ ] get_user_endpoints +- [ ] get_voice_channel +- [ ] list_tags_for_resource +- [ ] phone_number_validate - [ ] put_event_stream +- [ ] put_events +- [ ] remove_attributes - [ ] send_messages - [ ] send_users_messages +- [ ] tag_resource +- [ ] untag_resource - [ ] update_adm_channel - [ ] update_apns_channel - [ ] update_apns_sandbox_channel @@ -3216,32 +4762,135 @@ - [ ] update_gcm_channel - [ ] update_segment - [ ] update_sms_channel +- [ ] update_voice_channel -## polly - 83% implemented +## pinpoint-email +0% implemented +- [ ] create_configuration_set +- [ ] create_configuration_set_event_destination +- [ ] create_dedicated_ip_pool +- [ ] create_deliverability_test_report +- [ ] create_email_identity +- [ ] delete_configuration_set +- [ ] delete_configuration_set_event_destination +- [ ] delete_dedicated_ip_pool +- [ ] delete_email_identity +- [ ] get_account +- [ ] get_blacklist_reports +- [ ] get_configuration_set +- [ ] get_configuration_set_event_destinations +- [ ] get_dedicated_ip +- [ ] get_dedicated_ips +- [ ] get_deliverability_dashboard_options +- [ ] get_deliverability_test_report +- [ ] get_domain_deliverability_campaign +- [ ] get_domain_statistics_report +- [ ] get_email_identity +- [ ] 
list_configuration_sets +- [ ] list_dedicated_ip_pools +- [ ] list_deliverability_test_reports +- [ ] list_domain_deliverability_campaigns +- [ ] list_email_identities +- [ ] list_tags_for_resource +- [ ] put_account_dedicated_ip_warmup_attributes +- [ ] put_account_sending_attributes +- [ ] put_configuration_set_delivery_options +- [ ] put_configuration_set_reputation_options +- [ ] put_configuration_set_sending_options +- [ ] put_configuration_set_tracking_options +- [ ] put_dedicated_ip_in_pool +- [ ] put_dedicated_ip_warmup_attributes +- [ ] put_deliverability_dashboard_option +- [ ] put_email_identity_dkim_attributes +- [ ] put_email_identity_feedback_attributes +- [ ] put_email_identity_mail_from_attributes +- [ ] send_email +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_configuration_set_event_destination + +## pinpoint-sms-voice +0% implemented +- [ ] create_configuration_set +- [ ] create_configuration_set_event_destination +- [ ] delete_configuration_set +- [ ] delete_configuration_set_event_destination +- [ ] get_configuration_set_event_destinations +- [ ] send_voice_message +- [ ] update_configuration_set_event_destination + +## polly +55% implemented - [X] delete_lexicon - [X] describe_voices - [X] get_lexicon +- [ ] get_speech_synthesis_task - [X] list_lexicons +- [ ] list_speech_synthesis_tasks - [X] put_lexicon +- [ ] start_speech_synthesis_task - [ ] synthesize_speech -## pricing - 0% implemented +## pricing +0% implemented - [ ] describe_services - [ ] get_attribute_values - [ ] get_products -## rds - 0% implemented +## quicksight +0% implemented +- [ ] create_group +- [ ] create_group_membership +- [ ] delete_group +- [ ] delete_group_membership +- [ ] delete_user +- [ ] delete_user_by_principal_id +- [ ] describe_group +- [ ] describe_user +- [ ] get_dashboard_embed_url +- [ ] list_group_memberships +- [ ] list_groups +- [ ] list_user_groups +- [ ] list_users +- [ ] register_user +- [ ] update_group +- [ ] update_user + +## ram +0% implemented +- [ ] accept_resource_share_invitation +- [ ] associate_resource_share +- [ ] create_resource_share +- [ ] delete_resource_share +- [ ] disassociate_resource_share +- [ ] enable_sharing_with_aws_organization +- [ ] get_resource_policies +- [ ] get_resource_share_associations +- [ ] get_resource_share_invitations +- [ ] get_resource_shares +- [ ] list_principals +- [ ] list_resources +- [ ] reject_resource_share_invitation +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_resource_share + +## rds +0% implemented - [ ] add_role_to_db_cluster +- [ ] add_role_to_db_instance - [ ] add_source_identifier_to_subscription - [ ] add_tags_to_resource - [ ] apply_pending_maintenance_action - [ ] authorize_db_security_group_ingress +- [ ] backtrack_db_cluster - [ ] copy_db_cluster_parameter_group - [ ] copy_db_cluster_snapshot - [ ] copy_db_parameter_group - [ ] copy_db_snapshot - [ ] copy_option_group - [ ] create_db_cluster +- [ ] create_db_cluster_endpoint - [ ] create_db_cluster_parameter_group - [ ] create_db_cluster_snapshot - [ ] create_db_instance @@ -3251,25 +4900,32 @@ - [ ] create_db_snapshot - [ ] create_db_subnet_group - [ ] create_event_subscription +- [ ] create_global_cluster - [ ] create_option_group - [ ] delete_db_cluster +- [ ] delete_db_cluster_endpoint - [ ] delete_db_cluster_parameter_group - [ ] delete_db_cluster_snapshot - [ ] delete_db_instance +- [ ] delete_db_instance_automated_backup - [ ] delete_db_parameter_group - [ ] delete_db_security_group - [ ] delete_db_snapshot - [ ] delete_db_subnet_group 
- [ ] delete_event_subscription +- [ ] delete_global_cluster - [ ] delete_option_group - [ ] describe_account_attributes - [ ] describe_certificates +- [ ] describe_db_cluster_backtracks +- [ ] describe_db_cluster_endpoints - [ ] describe_db_cluster_parameter_groups - [ ] describe_db_cluster_parameters - [ ] describe_db_cluster_snapshot_attributes - [ ] describe_db_cluster_snapshots - [ ] describe_db_clusters - [ ] describe_db_engine_versions +- [ ] describe_db_instance_automated_backups - [ ] describe_db_instances - [ ] describe_db_log_files - [ ] describe_db_parameter_groups @@ -3283,6 +4939,7 @@ - [ ] describe_event_categories - [ ] describe_event_subscriptions - [ ] describe_events +- [ ] describe_global_clusters - [ ] describe_option_group_options - [ ] describe_option_groups - [ ] describe_orderable_db_instance_options @@ -3294,7 +4951,9 @@ - [ ] download_db_log_file_portion - [ ] failover_db_cluster - [ ] list_tags_for_resource +- [ ] modify_current_db_cluster_capacity - [ ] modify_db_cluster +- [ ] modify_db_cluster_endpoint - [ ] modify_db_cluster_parameter_group - [ ] modify_db_cluster_snapshot_attribute - [ ] modify_db_instance @@ -3303,12 +4962,15 @@ - [ ] modify_db_snapshot_attribute - [ ] modify_db_subnet_group - [ ] modify_event_subscription +- [ ] modify_global_cluster - [ ] modify_option_group - [ ] promote_read_replica - [ ] promote_read_replica_db_cluster - [ ] purchase_reserved_db_instances_offering - [ ] reboot_db_instance +- [ ] remove_from_global_cluster - [ ] remove_role_from_db_cluster +- [ ] remove_role_from_db_instance - [ ] remove_source_identifier_from_subscription - [ ] remove_tags_from_resource - [ ] reset_db_cluster_parameter_group @@ -3320,12 +4982,30 @@ - [ ] restore_db_instance_from_s3 - [ ] restore_db_instance_to_point_in_time - [ ] revoke_db_security_group_ingress +- [ ] start_activity_stream +- [ ] start_db_cluster - [ ] start_db_instance +- [ ] stop_activity_stream +- [ ] stop_db_cluster - [ ] stop_db_instance -## redshift - 41% implemented +## rds-data +0% implemented +- [ ] batch_execute_statement +- [ ] begin_transaction +- [ ] commit_transaction +- [ ] execute_sql +- [ ] execute_statement +- [ ] rollback_transaction + +## redshift +32% implemented +- [ ] accept_reserved_node_exchange - [ ] authorize_cluster_security_group_ingress - [ ] authorize_snapshot_access +- [ ] batch_delete_cluster_snapshots +- [ ] batch_modify_cluster_snapshots +- [ ] cancel_resize - [ ] copy_cluster_snapshot - [X] create_cluster - [X] create_cluster_parameter_group @@ -3336,6 +5016,7 @@ - [ ] create_hsm_client_certificate - [ ] create_hsm_configuration - [X] create_snapshot_copy_grant +- [ ] create_snapshot_schedule - [X] create_tags - [X] delete_cluster - [X] delete_cluster_parameter_group @@ -3346,12 +5027,16 @@ - [ ] delete_hsm_client_certificate - [ ] delete_hsm_configuration - [X] delete_snapshot_copy_grant +- [ ] delete_snapshot_schedule - [X] delete_tags +- [ ] describe_account_attributes +- [ ] describe_cluster_db_revisions - [X] describe_cluster_parameter_groups - [ ] describe_cluster_parameters - [X] describe_cluster_security_groups - [X] describe_cluster_snapshots - [X] describe_cluster_subnet_groups +- [ ] describe_cluster_tracks - [ ] describe_cluster_versions - [X] describe_clusters - [ ] describe_default_cluster_parameters @@ -3366,6 +5051,8 @@ - [ ] describe_reserved_nodes - [ ] describe_resize - [X] describe_snapshot_copy_grants +- [ ] describe_snapshot_schedules +- [ ] describe_storage - [ ] describe_table_restore_status - [X] describe_tags - [ ] 
disable_logging @@ -3373,28 +5060,37 @@ - [ ] enable_logging - [X] enable_snapshot_copy - [ ] get_cluster_credentials +- [ ] get_reserved_node_exchange_offerings - [X] modify_cluster +- [ ] modify_cluster_db_revision - [ ] modify_cluster_iam_roles +- [ ] modify_cluster_maintenance - [ ] modify_cluster_parameter_group +- [ ] modify_cluster_snapshot +- [ ] modify_cluster_snapshot_schedule - [ ] modify_cluster_subnet_group - [ ] modify_event_subscription - [X] modify_snapshot_copy_retention_period +- [ ] modify_snapshot_schedule - [ ] purchase_reserved_node_offering - [ ] reboot_cluster - [ ] reset_cluster_parameter_group +- [ ] resize_cluster - [X] restore_from_cluster_snapshot - [ ] restore_table_from_cluster_snapshot - [ ] revoke_cluster_security_group_ingress - [ ] revoke_snapshot_access - [ ] rotate_encryption_key -## rekognition - 0% implemented +## rekognition +0% implemented - [ ] compare_faces - [ ] create_collection - [ ] create_stream_processor - [ ] delete_collection - [ ] delete_faces - [ ] delete_stream_processor +- [ ] describe_collection - [ ] describe_stream_processor - [ ] detect_faces - [ ] detect_labels @@ -3423,28 +5119,70 @@ - [ ] start_stream_processor - [ ] stop_stream_processor -## resource-groups - 62% implemented +## resource-groups +75% implemented - [X] create_group - [X] delete_group - [X] get_group -- [X] get_group_query -- [ ] get_tags +- [ ] get_group_query +- [X] get_tags - [ ] list_group_resources - [X] list_groups - [ ] search_resources -- [ ] tag -- [ ] untag +- [X] tag +- [X] untag - [X] update_group - [X] update_group_query -## resourcegroupstaggingapi - 60% implemented +## resourcegroupstaggingapi +60% implemented - [X] get_resources - [X] get_tag_keys - [X] get_tag_values - [ ] tag_resources - [ ] untag_resources -## route53 - 12% implemented +## robomaker +0% implemented +- [ ] batch_describe_simulation_job +- [ ] cancel_deployment_job +- [ ] cancel_simulation_job +- [ ] create_deployment_job +- [ ] create_fleet +- [ ] create_robot +- [ ] create_robot_application +- [ ] create_robot_application_version +- [ ] create_simulation_application +- [ ] create_simulation_application_version +- [ ] create_simulation_job +- [ ] delete_fleet +- [ ] delete_robot +- [ ] delete_robot_application +- [ ] delete_simulation_application +- [ ] deregister_robot +- [ ] describe_deployment_job +- [ ] describe_fleet +- [ ] describe_robot +- [ ] describe_robot_application +- [ ] describe_simulation_application +- [ ] describe_simulation_job +- [ ] list_deployment_jobs +- [ ] list_fleets +- [ ] list_robot_applications +- [ ] list_robots +- [ ] list_simulation_applications +- [ ] list_simulation_jobs +- [ ] list_tags_for_resource +- [ ] register_robot +- [ ] restart_simulation_job +- [ ] sync_deployment_job +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_robot_application +- [ ] update_simulation_application + +## route53 +12% implemented - [ ] associate_vpc_with_hosted_zone - [ ] change_resource_record_sets - [X] change_tags_for_resource @@ -3502,7 +5240,8 @@ - [ ] update_traffic_policy_comment - [ ] update_traffic_policy_instance -## route53domains - 0% implemented +## route53domains +0% implemented - [ ] check_domain_availability - [ ] check_domain_transferability - [ ] delete_tags_for_domain @@ -3528,7 +5267,33 @@ - [ ] update_tags_for_domain - [ ] view_billing -## s3 - 15% implemented +## route53resolver +0% implemented +- [ ] associate_resolver_endpoint_ip_address +- [ ] associate_resolver_rule +- [ ] create_resolver_endpoint +- [ ] create_resolver_rule +- [ ] 
delete_resolver_endpoint +- [ ] delete_resolver_rule +- [ ] disassociate_resolver_endpoint_ip_address +- [ ] disassociate_resolver_rule +- [ ] get_resolver_endpoint +- [ ] get_resolver_rule +- [ ] get_resolver_rule_association +- [ ] get_resolver_rule_policy +- [ ] list_resolver_endpoint_ip_addresses +- [ ] list_resolver_endpoints +- [ ] list_resolver_rule_associations +- [ ] list_resolver_rules +- [ ] list_tags_for_resource +- [ ] put_resolver_rule_policy +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_resolver_endpoint +- [ ] update_resolver_rule + +## s3 +14% implemented - [ ] abort_multipart_upload - [ ] complete_multipart_upload - [ ] copy_object @@ -3548,7 +5313,8 @@ - [ ] delete_object - [ ] delete_object_tagging - [ ] delete_objects -- [X] get_bucket_accelerate_configuration +- [ ] delete_public_access_block +- [ ] get_bucket_accelerate_configuration - [X] get_bucket_acl - [ ] get_bucket_analytics_configuration - [ ] get_bucket_cors @@ -3556,12 +5322,13 @@ - [ ] get_bucket_inventory_configuration - [ ] get_bucket_lifecycle - [ ] get_bucket_lifecycle_configuration -- [X] get_bucket_location +- [ ] get_bucket_location - [ ] get_bucket_logging - [ ] get_bucket_metrics_configuration - [ ] get_bucket_notification - [ ] get_bucket_notification_configuration - [X] get_bucket_policy +- [ ] get_bucket_policy_status - [ ] get_bucket_replication - [ ] get_bucket_request_payment - [ ] get_bucket_tagging @@ -3569,8 +5336,12 @@ - [ ] get_bucket_website - [ ] get_object - [ ] get_object_acl +- [ ] get_object_legal_hold +- [ ] get_object_lock_configuration +- [ ] get_object_retention - [ ] get_object_tagging - [ ] get_object_torrent +- [ ] get_public_access_block - [ ] head_bucket - [ ] head_object - [ ] list_bucket_analytics_configurations @@ -3602,52 +5373,111 @@ - [ ] put_bucket_website - [ ] put_object - [ ] put_object_acl +- [ ] put_object_legal_hold +- [ ] put_object_lock_configuration +- [ ] put_object_retention - [ ] put_object_tagging +- [ ] put_public_access_block - [ ] restore_object - [ ] select_object_content - [ ] upload_part - [ ] upload_part_copy -## sagemaker - 0% implemented +## s3control +0% implemented +- [ ] create_job +- [ ] delete_public_access_block +- [ ] describe_job +- [ ] get_public_access_block +- [ ] list_jobs +- [ ] put_public_access_block +- [ ] update_job_priority +- [ ] update_job_status + +## sagemaker +0% implemented - [ ] add_tags +- [ ] create_algorithm +- [ ] create_code_repository +- [ ] create_compilation_job - [ ] create_endpoint - [ ] create_endpoint_config +- [ ] create_hyper_parameter_tuning_job +- [ ] create_labeling_job - [ ] create_model +- [ ] create_model_package - [ ] create_notebook_instance - [ ] create_notebook_instance_lifecycle_config - [ ] create_presigned_notebook_instance_url - [ ] create_training_job +- [ ] create_transform_job +- [ ] create_workteam +- [ ] delete_algorithm +- [ ] delete_code_repository - [ ] delete_endpoint - [ ] delete_endpoint_config - [ ] delete_model +- [ ] delete_model_package - [ ] delete_notebook_instance - [ ] delete_notebook_instance_lifecycle_config - [ ] delete_tags +- [ ] delete_workteam +- [ ] describe_algorithm +- [ ] describe_code_repository +- [ ] describe_compilation_job - [ ] describe_endpoint - [ ] describe_endpoint_config +- [ ] describe_hyper_parameter_tuning_job +- [ ] describe_labeling_job - [ ] describe_model +- [ ] describe_model_package - [ ] describe_notebook_instance - [ ] describe_notebook_instance_lifecycle_config +- [ ] describe_subscribed_workteam - [ ] describe_training_job +- [ ] 
describe_transform_job +- [ ] describe_workteam +- [ ] get_search_suggestions +- [ ] list_algorithms +- [ ] list_code_repositories +- [ ] list_compilation_jobs - [ ] list_endpoint_configs - [ ] list_endpoints +- [ ] list_hyper_parameter_tuning_jobs +- [ ] list_labeling_jobs +- [ ] list_labeling_jobs_for_workteam +- [ ] list_model_packages - [ ] list_models - [ ] list_notebook_instance_lifecycle_configs - [ ] list_notebook_instances +- [ ] list_subscribed_workteams - [ ] list_tags - [ ] list_training_jobs +- [ ] list_training_jobs_for_hyper_parameter_tuning_job +- [ ] list_transform_jobs +- [ ] list_workteams +- [ ] render_ui_template +- [ ] search - [ ] start_notebook_instance +- [ ] stop_compilation_job +- [ ] stop_hyper_parameter_tuning_job +- [ ] stop_labeling_job - [ ] stop_notebook_instance - [ ] stop_training_job +- [ ] stop_transform_job +- [ ] update_code_repository - [ ] update_endpoint - [ ] update_endpoint_weights_and_capacities - [ ] update_notebook_instance - [ ] update_notebook_instance_lifecycle_config +- [ ] update_workteam -## sagemaker-runtime - 0% implemented +## sagemaker-runtime +0% implemented - [ ] invoke_endpoint -## sdb - 0% implemented +## sdb +0% implemented - [ ] batch_delete_attributes - [ ] batch_put_attributes - [ ] create_domain @@ -3659,15 +5489,19 @@ - [ ] put_attributes - [ ] select -## secretsmanager - 33% implemented +## secretsmanager +55% implemented - [ ] cancel_rotate_secret - [X] create_secret +- [ ] delete_resource_policy - [X] delete_secret - [X] describe_secret - [X] get_random_password +- [ ] get_resource_policy - [X] get_secret_value - [X] list_secret_version_ids - [X] list_secrets +- [ ] put_resource_policy - [X] put_secret_value - [X] restore_secret - [X] rotate_secret @@ -3676,23 +5510,92 @@ - [ ] update_secret - [ ] update_secret_version_stage -## serverlessrepo - 0% implemented +## securityhub +0% implemented +- [ ] accept_invitation +- [ ] batch_disable_standards +- [ ] batch_enable_standards +- [ ] batch_import_findings +- [ ] create_action_target +- [ ] create_insight +- [ ] create_members +- [ ] decline_invitations +- [ ] delete_action_target +- [ ] delete_insight +- [ ] delete_invitations +- [ ] delete_members +- [ ] describe_action_targets +- [ ] describe_hub +- [ ] describe_products +- [ ] disable_import_findings_for_product +- [ ] disable_security_hub +- [ ] disassociate_from_master_account +- [ ] disassociate_members +- [ ] enable_import_findings_for_product +- [ ] enable_security_hub +- [ ] get_enabled_standards +- [ ] get_findings +- [ ] get_insight_results +- [ ] get_insights +- [ ] get_invitations_count +- [ ] get_master_account +- [ ] get_members +- [ ] invite_members +- [ ] list_enabled_products_for_import +- [ ] list_invitations +- [ ] list_members +- [ ] list_tags_for_resource +- [ ] tag_resource +- [ ] untag_resource +- [ ] update_action_target +- [ ] update_findings +- [ ] update_insight + +## serverlessrepo +0% implemented - [ ] create_application - [ ] create_application_version - [ ] create_cloud_formation_change_set +- [ ] create_cloud_formation_template - [ ] delete_application - [ ] get_application - [ ] get_application_policy +- [ ] get_cloud_formation_template +- [ ] list_application_dependencies - [ ] list_application_versions - [ ] list_applications - [ ] put_application_policy - [ ] update_application -## servicecatalog - 0% implemented +## service-quotas +0% implemented +- [ ] associate_service_quota_template +- [ ] delete_service_quota_increase_request_from_template +- [ ] 
disassociate_service_quota_template +- [ ] get_association_for_service_quota_template +- [ ] get_aws_default_service_quota +- [ ] get_requested_service_quota_change +- [ ] get_service_quota +- [ ] get_service_quota_increase_request_from_template +- [ ] list_aws_default_service_quotas +- [ ] list_requested_service_quota_change_history +- [ ] list_requested_service_quota_change_history_by_quota +- [ ] list_service_quota_increase_requests_in_template +- [ ] list_service_quotas +- [ ] list_services +- [ ] put_service_quota_increase_request_into_template +- [ ] request_service_quota_increase + +## servicecatalog +0% implemented - [ ] accept_portfolio_share +- [ ] associate_budget_with_resource - [ ] associate_principal_with_portfolio - [ ] associate_product_with_portfolio +- [ ] associate_service_action_with_provisioning_artifact - [ ] associate_tag_option_with_resource +- [ ] batch_associate_service_action_with_provisioning_artifact +- [ ] batch_disassociate_service_action_from_provisioning_artifact - [ ] copy_product - [ ] create_constraint - [ ] create_portfolio @@ -3700,6 +5603,7 @@ - [ ] create_product - [ ] create_provisioned_product_plan - [ ] create_provisioning_artifact +- [ ] create_service_action - [ ] create_tag_option - [ ] delete_constraint - [ ] delete_portfolio @@ -3707,10 +5611,12 @@ - [ ] delete_product - [ ] delete_provisioned_product_plan - [ ] delete_provisioning_artifact +- [ ] delete_service_action - [ ] delete_tag_option - [ ] describe_constraint - [ ] describe_copy_product_status - [ ] describe_portfolio +- [ ] describe_portfolio_share_status - [ ] describe_product - [ ] describe_product_as_admin - [ ] describe_product_view @@ -3719,22 +5625,36 @@ - [ ] describe_provisioning_artifact - [ ] describe_provisioning_parameters - [ ] describe_record +- [ ] describe_service_action +- [ ] describe_service_action_execution_parameters - [ ] describe_tag_option +- [ ] disable_aws_organizations_access +- [ ] disassociate_budget_from_resource - [ ] disassociate_principal_from_portfolio - [ ] disassociate_product_from_portfolio +- [ ] disassociate_service_action_from_provisioning_artifact - [ ] disassociate_tag_option_from_resource +- [ ] enable_aws_organizations_access - [ ] execute_provisioned_product_plan +- [ ] execute_provisioned_product_service_action +- [ ] get_aws_organizations_access_status - [ ] list_accepted_portfolio_shares +- [ ] list_budgets_for_resource - [ ] list_constraints_for_portfolio - [ ] list_launch_paths +- [ ] list_organization_portfolio_access - [ ] list_portfolio_access - [ ] list_portfolios - [ ] list_portfolios_for_product - [ ] list_principals_for_portfolio - [ ] list_provisioned_product_plans - [ ] list_provisioning_artifacts +- [ ] list_provisioning_artifacts_for_service_action - [ ] list_record_history - [ ] list_resources_for_tag_option +- [ ] list_service_actions +- [ ] list_service_actions_for_provisioning_artifact +- [ ] list_stack_instances_for_provisioned_product - [ ] list_tag_options - [ ] provision_product - [ ] reject_portfolio_share @@ -3747,16 +5667,21 @@ - [ ] update_portfolio - [ ] update_product - [ ] update_provisioned_product +- [ ] update_provisioned_product_properties - [ ] update_provisioning_artifact +- [ ] update_service_action - [ ] update_tag_option -## servicediscovery - 0% implemented +## servicediscovery +0% implemented +- [ ] create_http_namespace - [ ] create_private_dns_namespace - [ ] create_public_dns_namespace - [ ] create_service - [ ] delete_namespace - [ ] delete_service - [ ] deregister_instance +- [ ] 
discover_instances - [ ] get_instance - [ ] get_instances_health_status - [ ] get_namespace @@ -3770,7 +5695,8 @@ - [ ] update_instance_custom_health_status - [ ] update_service -## ses - 11% implemented +## ses +12% implemented - [ ] clone_receipt_rule_set - [ ] create_configuration_set - [ ] create_configuration_set_event_destination @@ -3813,6 +5739,7 @@ - [ ] list_receipt_rule_sets - [ ] list_templates - [X] list_verified_email_addresses +- [ ] put_configuration_set_delivery_options - [ ] put_identity_policy - [ ] reorder_receipt_rule_set - [ ] send_bounce @@ -3826,7 +5753,7 @@ - [ ] set_identity_feedback_forwarding_enabled - [ ] set_identity_headers_in_notifications_enabled - [ ] set_identity_mail_from_domain -- [ ] set_identity_notification_topic +- [X] set_identity_notification_topic - [ ] set_receipt_rule_position - [ ] test_render_template - [ ] update_account_sending_enabled @@ -3842,32 +5769,83 @@ - [X] verify_email_address - [X] verify_email_identity -## shield - 0% implemented +## shield +0% implemented +- [ ] associate_drt_log_bucket +- [ ] associate_drt_role - [ ] create_protection - [ ] create_subscription - [ ] delete_protection - [ ] delete_subscription - [ ] describe_attack +- [ ] describe_drt_access +- [ ] describe_emergency_contact_settings - [ ] describe_protection - [ ] describe_subscription +- [ ] disassociate_drt_log_bucket +- [ ] disassociate_drt_role - [ ] get_subscription_state - [ ] list_attacks - [ ] list_protections +- [ ] update_emergency_contact_settings +- [ ] update_subscription -## sms - 0% implemented +## signer +0% implemented +- [ ] cancel_signing_profile +- [ ] describe_signing_job +- [ ] get_signing_platform +- [ ] get_signing_profile +- [ ] list_signing_jobs +- [ ] list_signing_platforms +- [ ] list_signing_profiles +- [ ] put_signing_profile +- [ ] start_signing_job + +## sms +0% implemented +- [ ] create_app - [ ] create_replication_job +- [ ] delete_app +- [ ] delete_app_launch_configuration +- [ ] delete_app_replication_configuration - [ ] delete_replication_job - [ ] delete_server_catalog - [ ] disassociate_connector +- [ ] generate_change_set +- [ ] generate_template +- [ ] get_app +- [ ] get_app_launch_configuration +- [ ] get_app_replication_configuration - [ ] get_connectors - [ ] get_replication_jobs - [ ] get_replication_runs - [ ] get_servers - [ ] import_server_catalog +- [ ] launch_app +- [ ] list_apps +- [ ] put_app_launch_configuration +- [ ] put_app_replication_configuration +- [ ] start_app_replication - [ ] start_on_demand_replication_run +- [ ] stop_app_replication +- [ ] terminate_app +- [ ] update_app - [ ] update_replication_job -## snowball - 0% implemented +## sms-voice +0% implemented +- [ ] create_configuration_set +- [ ] create_configuration_set_event_destination +- [ ] delete_configuration_set +- [ ] delete_configuration_set_event_destination +- [ ] get_configuration_set_event_destinations +- [ ] list_configuration_sets +- [ ] send_voice_message +- [ ] update_configuration_set_event_destination + +## snowball +0% implemented - [ ] cancel_cluster - [ ] cancel_job - [ ] create_address @@ -3882,11 +5860,13 @@ - [ ] get_snowball_usage - [ ] list_cluster_jobs - [ ] list_clusters +- [ ] list_compatible_images - [ ] list_jobs - [ ] update_cluster - [ ] update_job -## sns - 53% implemented +## sns +48% implemented - [ ] add_permission - [ ] check_if_phone_number_is_opted_out - [ ] confirm_subscription @@ -3906,6 +5886,7 @@ - [X] list_platform_applications - [X] list_subscriptions - [ ] list_subscriptions_by_topic +- [ ] 
list_tags_for_resource - [X] list_topics - [ ] opt_in_phone_number - [X] publish @@ -3916,9 +5897,12 @@ - [X] set_subscription_attributes - [ ] set_topic_attributes - [X] subscribe +- [ ] tag_resource - [X] unsubscribe +- [ ] untag_resource -## sqs - 65% implemented +## sqs +65% implemented - [X] add_permission - [X] change_message_visibility - [ ] change_message_visibility_batch @@ -3927,7 +5911,7 @@ - [ ] delete_message_batch - [X] delete_queue - [ ] get_queue_attributes -- [X] get_queue_url +- [ ] get_queue_url - [X] list_dead_letter_source_queues - [ ] list_queue_tags - [X] list_queues @@ -3940,19 +5924,23 @@ - [X] tag_queue - [X] untag_queue -## ssm - 11% implemented +## ssm +10% implemented - [X] add_tags_to_resource - [ ] cancel_command +- [ ] cancel_maintenance_window_execution - [ ] create_activation - [ ] create_association - [ ] create_association_batch - [ ] create_document - [ ] create_maintenance_window +- [ ] create_ops_item - [ ] create_patch_baseline - [ ] create_resource_data_sync - [ ] delete_activation - [ ] delete_association - [ ] delete_document +- [ ] delete_inventory - [ ] delete_maintenance_window - [X] delete_parameter - [X] delete_parameters @@ -3964,6 +5952,8 @@ - [ ] deregister_task_from_maintenance_window - [ ] describe_activations - [ ] describe_association +- [ ] describe_association_execution_targets +- [ ] describe_association_executions - [ ] describe_automation_executions - [ ] describe_automation_step_executions - [ ] describe_available_patches @@ -3976,18 +5966,25 @@ - [ ] describe_instance_patch_states - [ ] describe_instance_patch_states_for_patch_group - [ ] describe_instance_patches +- [ ] describe_inventory_deletions - [ ] describe_maintenance_window_execution_task_invocations - [ ] describe_maintenance_window_execution_tasks - [ ] describe_maintenance_window_executions +- [ ] describe_maintenance_window_schedule - [ ] describe_maintenance_window_targets - [ ] describe_maintenance_window_tasks - [ ] describe_maintenance_windows +- [ ] describe_maintenance_windows_for_target +- [ ] describe_ops_items - [ ] describe_parameters - [ ] describe_patch_baselines - [ ] describe_patch_group_state - [ ] describe_patch_groups +- [ ] describe_patch_properties +- [ ] describe_sessions - [ ] get_automation_execution -- [ ] get_command_invocation +- [X] get_command_invocation +- [ ] get_connection_status - [ ] get_default_patch_baseline - [ ] get_deployable_patch_snapshot_for_instance - [ ] get_document @@ -3998,12 +5995,16 @@ - [ ] get_maintenance_window_execution_task - [ ] get_maintenance_window_execution_task_invocation - [ ] get_maintenance_window_task +- [ ] get_ops_item +- [ ] get_ops_summary - [X] get_parameter - [ ] get_parameter_history - [X] get_parameters - [X] get_parameters_by_path - [ ] get_patch_baseline - [ ] get_patch_baseline_for_patch_group +- [ ] get_service_setting +- [ ] label_parameter_version - [ ] list_association_versions - [ ] list_associations - [ ] list_command_invocations @@ -4025,10 +6026,15 @@ - [ ] register_target_with_maintenance_window - [ ] register_task_with_maintenance_window - [X] remove_tags_from_resource +- [ ] reset_service_setting +- [ ] resume_session - [ ] send_automation_signal - [X] send_command +- [ ] start_associations_once - [ ] start_automation_execution +- [ ] start_session - [ ] stop_automation_execution +- [ ] terminate_session - [ ] update_association - [ ] update_association_status - [ ] update_document @@ -4037,9 +6043,12 @@ - [ ] update_maintenance_window_target - [ ] update_maintenance_window_task - [ 
] update_managed_instance_role +- [ ] update_ops_item - [ ] update_patch_baseline +- [ ] update_service_setting -## stepfunctions - 0% implemented +## stepfunctions +0% implemented - [ ] create_activity - [ ] create_state_machine - [ ] delete_activity @@ -4053,23 +6062,30 @@ - [ ] list_activities - [ ] list_executions - [ ] list_state_machines +- [ ] list_tags_for_resource - [ ] send_task_failure - [ ] send_task_heartbeat - [ ] send_task_success - [ ] start_execution - [ ] stop_execution +- [ ] tag_resource +- [ ] untag_resource - [ ] update_state_machine -## storagegateway - 0% implemented +## storagegateway +0% implemented - [ ] activate_gateway - [ ] add_cache - [ ] add_tags_to_resource - [ ] add_upload_buffer - [ ] add_working_storage +- [ ] assign_tape_pool +- [ ] attach_volume - [ ] cancel_archival - [ ] cancel_retrieval - [ ] create_cached_iscsi_volume - [ ] create_nfs_file_share +- [ ] create_smb_file_share - [ ] create_snapshot - [ ] create_snapshot_from_volume_recovery_point - [ ] create_stored_iscsi_volume @@ -4090,6 +6106,8 @@ - [ ] describe_gateway_information - [ ] describe_maintenance_start_time - [ ] describe_nfs_file_shares +- [ ] describe_smb_file_shares +- [ ] describe_smb_settings - [ ] describe_snapshot_schedule - [ ] describe_stored_iscsi_volumes - [ ] describe_tape_archives @@ -4098,7 +6116,9 @@ - [ ] describe_upload_buffer - [ ] describe_vtl_devices - [ ] describe_working_storage +- [ ] detach_volume - [ ] disable_gateway +- [ ] join_domain - [ ] list_file_shares - [ ] list_gateways - [ ] list_local_disks @@ -4114,6 +6134,7 @@ - [ ] retrieve_tape_archive - [ ] retrieve_tape_recovery_point - [ ] set_local_console_password +- [ ] set_smb_guest_password - [ ] shutdown_gateway - [ ] start_gateway - [ ] update_bandwidth_rate_limit @@ -4122,19 +6143,24 @@ - [ ] update_gateway_software_now - [ ] update_maintenance_start_time - [ ] update_nfs_file_share +- [ ] update_smb_file_share +- [ ] update_smb_security_strategy - [ ] update_snapshot_schedule - [ ] update_vtl_device_type -## sts - 42% implemented +## sts +50% implemented - [X] assume_role - [ ] assume_role_with_saml - [X] assume_role_with_web_identity - [ ] decode_authorization_message +- [ ] get_access_key_info - [ ] get_caller_identity - [X] get_federation_token - [X] get_session_token -## support - 0% implemented +## support +0% implemented - [ ] add_attachments_to_set - [ ] add_communication_to_case - [ ] create_case @@ -4150,7 +6176,8 @@ - [ ] refresh_trusted_advisor_check - [ ] resolve_case -## swf - 58% implemented +## swf +48% implemented - [ ] count_closed_workflow_executions - [ ] count_open_workflow_executions - [X] count_pending_activity_tasks @@ -4167,6 +6194,7 @@ - [X] list_closed_workflow_executions - [X] list_domains - [X] list_open_workflow_executions +- [ ] list_tags_for_resource - [ ] list_workflow_types - [X] poll_for_activity_task - [X] poll_for_decision_task @@ -4181,10 +6209,26 @@ - [X] respond_decision_task_completed - [X] signal_workflow_execution - [X] start_workflow_execution +- [ ] tag_resource - [X] terminate_workflow_execution +- [ ] undeprecate_activity_type +- [ ] undeprecate_domain +- [ ] undeprecate_workflow_type +- [ ] untag_resource -## transcribe - 0% implemented +## textract +0% implemented +- [ ] analyze_document +- [ ] detect_document_text +- [ ] get_document_analysis +- [ ] get_document_text_detection +- [ ] start_document_analysis +- [ ] start_document_text_detection + +## transcribe +0% implemented - [ ] create_vocabulary +- [ ] delete_transcription_job - [ ] delete_vocabulary 
- [ ] get_transcription_job - [ ] get_vocabulary @@ -4193,10 +6237,37 @@ - [ ] start_transcription_job - [ ] update_vocabulary -## translate - 0% implemented +## transfer +0% implemented +- [ ] create_server +- [ ] create_user +- [ ] delete_server +- [ ] delete_ssh_public_key +- [ ] delete_user +- [ ] describe_server +- [ ] describe_user +- [ ] import_ssh_public_key +- [ ] list_servers +- [ ] list_tags_for_resource +- [ ] list_users +- [ ] start_server +- [ ] stop_server +- [ ] tag_resource +- [ ] test_identity_provider +- [ ] untag_resource +- [ ] update_server +- [ ] update_user + +## translate +0% implemented +- [ ] delete_terminology +- [ ] get_terminology +- [ ] import_terminology +- [ ] list_terminologies - [ ] translate_text -## waf - 0% implemented +## waf +0% implemented - [ ] create_byte_match_set - [ ] create_geo_match_set - [ ] create_ip_set @@ -4212,6 +6283,7 @@ - [ ] delete_byte_match_set - [ ] delete_geo_match_set - [ ] delete_ip_set +- [ ] delete_logging_configuration - [ ] delete_permission_policy - [ ] delete_rate_based_rule - [ ] delete_regex_match_set @@ -4227,6 +6299,7 @@ - [ ] get_change_token_status - [ ] get_geo_match_set - [ ] get_ip_set +- [ ] get_logging_configuration - [ ] get_permission_policy - [ ] get_rate_based_rule - [ ] get_rate_based_rule_managed_keys @@ -4243,6 +6316,7 @@ - [ ] list_byte_match_sets - [ ] list_geo_match_sets - [ ] list_ip_sets +- [ ] list_logging_configurations - [ ] list_rate_based_rules - [ ] list_regex_match_sets - [ ] list_regex_pattern_sets @@ -4251,9 +6325,13 @@ - [ ] list_size_constraint_sets - [ ] list_sql_injection_match_sets - [ ] list_subscribed_rule_groups +- [ ] list_tags_for_resource - [ ] list_web_acls - [ ] list_xss_match_sets +- [ ] put_logging_configuration - [ ] put_permission_policy +- [ ] tag_resource +- [ ] untag_resource - [ ] update_byte_match_set - [ ] update_geo_match_set - [ ] update_ip_set @@ -4267,7 +6345,8 @@ - [ ] update_web_acl - [ ] update_xss_match_set -## waf-regional - 0% implemented +## waf-regional +0% implemented - [ ] associate_web_acl - [ ] create_byte_match_set - [ ] create_geo_match_set @@ -4284,6 +6363,7 @@ - [ ] delete_byte_match_set - [ ] delete_geo_match_set - [ ] delete_ip_set +- [ ] delete_logging_configuration - [ ] delete_permission_policy - [ ] delete_rate_based_rule - [ ] delete_regex_match_set @@ -4300,6 +6380,7 @@ - [ ] get_change_token_status - [ ] get_geo_match_set - [ ] get_ip_set +- [ ] get_logging_configuration - [ ] get_permission_policy - [ ] get_rate_based_rule - [ ] get_rate_based_rule_managed_keys @@ -4317,6 +6398,7 @@ - [ ] list_byte_match_sets - [ ] list_geo_match_sets - [ ] list_ip_sets +- [ ] list_logging_configurations - [ ] list_rate_based_rules - [ ] list_regex_match_sets - [ ] list_regex_pattern_sets @@ -4326,9 +6408,13 @@ - [ ] list_size_constraint_sets - [ ] list_sql_injection_match_sets - [ ] list_subscribed_rule_groups +- [ ] list_tags_for_resource - [ ] list_web_acls - [ ] list_xss_match_sets +- [ ] put_logging_configuration - [ ] put_permission_policy +- [ ] tag_resource +- [ ] untag_resource - [ ] update_byte_match_set - [ ] update_geo_match_set - [ ] update_ip_set @@ -4342,7 +6428,8 @@ - [ ] update_web_acl - [ ] update_xss_match_set -## workdocs - 0% implemented +## workdocs +0% implemented - [ ] abort_document_version_upload - [ ] activate_user - [ ] add_resource_permissions @@ -4376,6 +6463,7 @@ - [ ] get_document_version - [ ] get_folder - [ ] get_folder_path +- [ ] get_resources - [ ] initiate_document_version_upload - [ ] remove_all_resource_permissions 
- [ ] remove_resource_permission @@ -4384,7 +6472,41 @@ - [ ] update_folder - [ ] update_user -## workmail - 0% implemented +## worklink +0% implemented +- [ ] associate_domain +- [ ] associate_website_authorization_provider +- [ ] associate_website_certificate_authority +- [ ] create_fleet +- [ ] delete_fleet +- [ ] describe_audit_stream_configuration +- [ ] describe_company_network_configuration +- [ ] describe_device +- [ ] describe_device_policy_configuration +- [ ] describe_domain +- [ ] describe_fleet_metadata +- [ ] describe_identity_provider_configuration +- [ ] describe_website_certificate_authority +- [ ] disassociate_domain +- [ ] disassociate_website_authorization_provider +- [ ] disassociate_website_certificate_authority +- [ ] list_devices +- [ ] list_domains +- [ ] list_fleets +- [ ] list_website_authorization_providers +- [ ] list_website_certificate_authorities +- [ ] restore_domain_access +- [ ] revoke_domain_access +- [ ] sign_out_user +- [ ] update_audit_stream_configuration +- [ ] update_company_network_configuration +- [ ] update_device_policy_configuration +- [ ] update_domain_metadata +- [ ] update_fleet_metadata +- [ ] update_identity_provider_configuration + +## workmail +0% implemented - [ ] associate_delegate_to_resource - [ ] associate_member_to_group - [ ] create_alias @@ -4403,6 +6525,7 @@ - [ ] describe_user - [ ] disassociate_delegate_from_resource - [ ] disassociate_member_from_group +- [ ] get_mailbox_details - [ ] list_aliases - [ ] list_group_members - [ ] list_groups @@ -4414,29 +6537,65 @@ - [ ] put_mailbox_permissions - [ ] register_to_work_mail - [ ] reset_password +- [ ] update_mailbox_quota - [ ] update_primary_email_address - [ ] update_resource -## workspaces - 0% implemented +## workspaces +0% implemented +- [ ] associate_ip_groups +- [ ] authorize_ip_rules +- [ ] copy_workspace_image +- [ ] create_ip_group - [ ] create_tags - [ ] create_workspaces +- [ ] delete_ip_group - [ ] delete_tags +- [ ] delete_workspace_image +- [ ] describe_account +- [ ] describe_account_modifications +- [ ] describe_client_properties +- [ ] describe_ip_groups - [ ] describe_tags - [ ] describe_workspace_bundles - [ ] describe_workspace_directories +- [ ] describe_workspace_images - [ ] describe_workspaces - [ ] describe_workspaces_connection_status +- [ ] disassociate_ip_groups +- [ ] import_workspace_image +- [ ] list_available_management_cidr_ranges +- [ ] modify_account +- [ ] modify_client_properties - [ ] modify_workspace_properties +- [ ] modify_workspace_state - [ ] reboot_workspaces - [ ] rebuild_workspaces +- [ ] revoke_ip_rules - [ ] start_workspaces - [ ] stop_workspaces - [ ] terminate_workspaces +- [ ] update_rules_of_ip_group -## xray - 0% implemented +## xray +0% implemented - [ ] batch_get_traces +- [ ] create_group +- [ ] create_sampling_rule +- [ ] delete_group +- [ ] delete_sampling_rule +- [ ] get_encryption_config +- [ ] get_group +- [ ] get_groups +- [ ] get_sampling_rules +- [ ] get_sampling_statistic_summaries +- [ ] get_sampling_targets - [ ] get_service_graph +- [ ] get_time_series_service_statistics - [ ] get_trace_graph - [ ] get_trace_summaries +- [ ] put_encryption_config - [ ] put_telemetry_records - [ ] put_trace_segments +- [ ] update_group +- [ ] update_sampling_rule diff --git a/scripts/implementation_coverage.py b/scripts/implementation_coverage.py index 4e385e1d6..0e1816088 100755 --- a/scripts/implementation_coverage.py +++ b/scripts/implementation_coverage.py @@ -61,7 +61,8 @@ def print_implementation_coverage(coverage): 
percentage_implemented = 0 print("") - print("## {} - {}% implemented".format(service_name, percentage_implemented)) + print("## {}\n".format(service_name)) + print("{}% implemented\n".format(percentage_implemented)) for op in operations: if op in implemented: print("- [X] {}".format(op)) @@ -93,7 +94,8 @@ def write_implementation_coverage_to_file(coverage): percentage_implemented = 0 file.write("\n") - file.write("## {} - {}% implemented\n".format(service_name, percentage_implemented)) + file.write("## {}\n".format(service_name)) + file.write("{}% implemented\n".format(percentage_implemented)) for op in operations: if op in implemented: file.write("- [X] {}\n".format(op)) From addb63108124c5e2b6c4062aa834c5a958f94e25 Mon Sep 17 00:00:00 2001 From: acsbendi Date: Thu, 22 Aug 2019 11:06:42 +0200 Subject: [PATCH 39/42] Skip checking the expiration of AssumedRole in server mode. --- tests/test_sts/test_sts.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_sts/test_sts.py b/tests/test_sts/test_sts.py index ac7c4ea11..b047a8d13 100644 --- a/tests/test_sts/test_sts.py +++ b/tests/test_sts/test_sts.py @@ -9,7 +9,7 @@ from nose.tools import assert_raises import sure # noqa -from moto import mock_sts, mock_sts_deprecated, mock_iam +from moto import mock_sts, mock_sts_deprecated, mock_iam, settings from moto.iam.models import ACCOUNT_ID from moto.sts.responses import MAX_FEDERATION_TOKEN_POLICY_LENGTH @@ -72,7 +72,8 @@ def test_assume_role(): Policy=policy, DurationSeconds=900) credentials = assume_role_response['Credentials'] - credentials['Expiration'].isoformat().should.equal('2012-01-01T12:15:00+00:00') + if not settings.TEST_SERVER_MODE: + credentials['Expiration'].isoformat().should.equal('2012-01-01T12:15:00+00:00') credentials['SessionToken'].should.have.length_of(356) assert credentials['SessionToken'].startswith("FQoGZXIvYXdzE") credentials['AccessKeyId'].should.have.length_of(20) From 956592d6154d101bbf0d4090bafbfbd7f6540e4f Mon Sep 17 00:00:00 2001 From: Bert Blommers Date: Thu, 22 Aug 2019 16:12:48 +0100 Subject: [PATCH 40/42] 2380 - Validate parameter-list for duplicates in dynamodb.batch_get_item --- moto/dynamodb2/responses.py | 12 +++++++ tests/test_dynamodb2/test_dynamodb.py | 52 +++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py index 86ca9a362..3e9fbb553 100644 --- a/moto/dynamodb2/responses.py +++ b/moto/dynamodb2/responses.py @@ -318,6 +318,9 @@ class DynamoHandler(BaseResponse): for table_name, table_request in table_batches.items(): keys = table_request['Keys'] + if self._contains_duplicates(keys): + er = 'com.amazon.coral.validate#ValidationException' + return self.error(er, 'Provided list of item keys contains duplicates') attributes_to_get = table_request.get('AttributesToGet') results["Responses"][table_name] = [] for key in keys: @@ -333,6 +336,15 @@ class DynamoHandler(BaseResponse): }) return dynamo_json_dump(results) + def _contains_duplicates(self, keys): + unique_keys = [] + for k in keys: + if k in unique_keys: + return True + else: + unique_keys.append(k) + return False + def query(self): name = self.body['TableName'] # {u'KeyConditionExpression': u'#n0 = :v0', u'ExpressionAttributeValues': {u':v0': {u'S': u'johndoe'}}, u'ExpressionAttributeNames': {u'#n0': u'username'}} diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py index a8f73bee6..fb6c0e17d 100644 --- a/tests/test_dynamodb2/test_dynamodb.py +++ 
b/tests/test_dynamodb2/test_dynamodb.py @@ -2141,3 +2141,55 @@ def test_scan_by_non_exists_index(): ex.exception.response['Error']['Message'].should.equal( 'The table does not have the specified index: non_exists_index' ) + + +@mock_dynamodb2 +def test_batch_items_returns_all(): + dynamodb = _create_user_table() + returned_items = dynamodb.batch_get_item(RequestItems={ + 'users': { + 'Keys': [{ + 'username': {'S': 'user0'} + }, { + 'username': {'S': 'user1'} + }, { + 'username': {'S': 'user2'} + }, { + 'username': {'S': 'user3'} + }], + 'ConsistentRead': True + } + })['Responses']['users'] + assert len(returned_items) == 3 + assert [item['username']['S'] for item in returned_items] == ['user1', 'user2', 'user3'] + + +@mock_dynamodb2 +def test_batch_items_should_throw_exception_for_duplicate_request(): + client = _create_user_table() + with assert_raises(ClientError) as ex: + client.batch_get_item(RequestItems={ + 'users': { + 'Keys': [{ + 'username': {'S': 'user0'} + }, { + 'username': {'S': 'user0'} + }], + 'ConsistentRead': True + }}) + ex.exception.response['Error']['Code'].should.equal('ValidationException') + ex.exception.response['Error']['Message'].should.equal('Provided list of item keys contains duplicates') + + +def _create_user_table(): + client = boto3.client('dynamodb', region_name='us-east-1') + client.create_table( + TableName='users', + KeySchema=[{'AttributeName': 'username', 'KeyType': 'HASH'}], + AttributeDefinitions=[{'AttributeName': 'username', 'AttributeType': 'S'}], + ProvisionedThroughput={'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5} + ) + client.put_item(TableName='users', Item={'username': {'S': 'user1'}, 'foo': {'S': 'bar'}}) + client.put_item(TableName='users', Item={'username': {'S': 'user2'}, 'foo': {'S': 'bar'}}) + client.put_item(TableName='users', Item={'username': {'S': 'user3'}, 'foo': {'S': 'bar'}}) + return client From cf2dae0ce8866f67ba088b36bafe3ec6c9827e1c Mon Sep 17 00:00:00 2001 From: acsbendi Date: Thu, 22 Aug 2019 18:09:52 +0200 Subject: [PATCH 41/42] Calling sts:GetCallerIdentity is always allowed. 
--- moto/core/access_control.py | 2 ++ tests/test_core/test_auth.py | 21 +++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/moto/core/access_control.py b/moto/core/access_control.py index c64acf20c..3fb11eebd 100644 --- a/moto/core/access_control.py +++ b/moto/core/access_control.py @@ -172,6 +172,8 @@ class IAMRequestBase(object): self._raise_signature_does_not_match() def check_action_permitted(self): + if self._action == 'sts:GetCallerIdentity': # always allowed, even if there's an explicit Deny for it + return True policies = self._access_key.collect_policies() permitted = False diff --git a/tests/test_core/test_auth.py b/tests/test_core/test_auth.py index 3a1107eaa..00229f808 100644 --- a/tests/test_core/test_auth.py +++ b/tests/test_core/test_auth.py @@ -273,6 +273,27 @@ def test_access_denied_with_denying_policy(): ) +@set_initial_no_auth_action_count(3) +@mock_sts +def test_get_caller_identity_allowed_with_denying_policy(): + user_name = 'test-user' + inline_policy_document = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Deny", + "Action": "sts:GetCallerIdentity", + "Resource": "*" + } + ] + } + access_key = create_user_with_access_key_and_inline_policy(user_name, inline_policy_document) + client = boto3.client('sts', region_name='us-east-1', + aws_access_key_id=access_key['AccessKeyId'], + aws_secret_access_key=access_key['SecretAccessKey']) + client.get_caller_identity().should.be.a(dict) + + @set_initial_no_auth_action_count(3) @mock_ec2 def test_allowed_with_wildcard_action(): From 1efd9ee58d5e9d5e24c0de23802bbe142db07970 Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Thu, 22 Aug 2019 19:28:11 -0400 Subject: [PATCH 42/42] Raise exception on invalid event source type and use full spec --- moto/awslambda/models.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py index b31e067c3..acc7a5257 100644 --- a/moto/awslambda/models.py +++ b/moto/awslambda/models.py @@ -435,7 +435,6 @@ class EventSourceMapping(BaseModel): self.event_source_arn = spec['EventSourceArn'] self.uuid = str(uuid.uuid4()) self.last_modified = time.mktime(datetime.datetime.utcnow().timetuple()) - self.batch_size = '' # Default to blank # BatchSize service default/max mapping batch_size_map = { @@ -453,6 +452,9 @@ class EventSourceMapping(BaseModel): "BatchSize {} exceeds the max of {}".format(batch_size, batch_size_entry[1])) else: self.batch_size = batch_size + else: + raise ValueError("InvalidParameterValueException", + "Unsupported event source type") # optional self.starting_position = spec.get('StartingPosition', 'TRIM_HORIZON') @@ -668,7 +670,7 @@ class LambdaBackend(BaseBackend): raise RESTError('InvalidParameterValueException', 'Missing {}'.format(param)) # Validate function name - func = self._lambdas.get_function_by_name_or_arn(spec.get('FunctionName', '')) + func = self._lambdas.get_function_by_name_or_arn(spec.pop('FunctionName', '')) if not func: raise RESTError('ResourceNotFoundException', 'Invalid FunctionName') @@ -682,11 +684,8 @@ class LambdaBackend(BaseBackend): raise RESTError('InvalidParameterValueException', '{} is FIFO'.format(queue.queue_arn)) else: - esm_spec = { - 'EventSourceArn': spec['EventSourceArn'], - 'FunctionArn': func.function_arn, - } - esm = EventSourceMapping(esm_spec) + spec.update({'FunctionArn': func.function_arn}) + esm = EventSourceMapping(spec) self._event_source_mappings[esm.uuid] = esm # Set backend function on queue
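
Illustrative sketch (not part of any patch above): the BatchSize rule that the new EventSourceMapping validation in moto/awslambda/models.py enforces, shown as a standalone Python snippet. The helper name and the example ARNs are hypothetical and made up for illustration; the default/maximum pairs mirror the batch_size_map added in patch 42.

# Standalone sketch of the BatchSize resolution logic, assuming the same
# service default/max mapping as the patch. Not moto's actual API surface.
def resolve_batch_size(event_source_arn, requested=None):
    defaults_and_maxima = {
        'kinesis': (100, 10000),
        'dynamodb': (100, 1000),
        'sqs': (10, 10),
    }
    for source, (default, maximum) in defaults_and_maxima.items():
        if source in event_source_arn.lower():
            if requested is None:
                # No BatchSize supplied: fall back to the service default
                return default
            if int(requested) > maximum:
                # Mirrors the InvalidParameterValueException raised in the patch
                raise ValueError('BatchSize {} exceeds the max of {}'.format(requested, maximum))
            return int(requested)
    # Mirrors the new "Unsupported event source type" rejection
    raise ValueError('Unsupported event source type')

print(resolve_batch_size('arn:aws:sqs:us-east-1:123456789012:my-queue'))            # 10 (SQS default)
print(resolve_batch_size('arn:aws:kinesis:us-east-1:123456789012:stream/demo', 500))  # 500 (under the Kinesis max)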