Merge branch 'master' into mark-alias-target

Commit 414ff930ed by Steve Pulec, 2019-07-07 23:11:53 -05:00 (committed via GitHub)
No known key found for this signature in database; GPG Key ID: 4AEE18F83AFDEB23
43 changed files with 20,598 additions and 1,590 deletions


@ -2,36 +2,56 @@ dist: xenial
language: python
sudo: false
services:
  - docker
python:
  - 2.7
  - 3.6
  - 3.7
env:
  - TEST_SERVER_MODE=false
  - TEST_SERVER_MODE=true
before_install:
  - export BOTO_CONFIG=/dev/null
install:
  - |
    python setup.py sdist
    if [ "$TEST_SERVER_MODE" = "true" ]; then
      docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${TRAVIS_PYTHON_VERSION}-stretch /moto/travis_moto_server.sh &
    fi
    travis_retry pip install boto==2.45.0
    travis_retry pip install boto3
    travis_retry pip install dist/moto*.gz
    travis_retry pip install coveralls==1.1
    travis_retry pip install -r requirements-dev.txt
    if [ "$TEST_SERVER_MODE" = "true" ]; then
      python wait_for.py
    fi
script:
  - make test
after_success:
  - coveralls
before_deploy:
  - git checkout $TRAVIS_BRANCH
  - git fetch --unshallow
  - python update_version_from_git.py
deploy:
  - provider: pypi
    distributions: sdist bdist_wheel
    user: spulec
    password:
      secure: NxnPylnTfekJmGyoufCw0lMoYRskSMJzvAIyAlJJVYKwEhmiCPOrdy5qV8i8mRZ1AkUsqU3jBZ/PD56n96clHW0E3d080UleRDj6JpyALVdeLfMqZl9kLmZ8bqakWzYq3VSJKw2zGP/L4tPGf8wTK1SUv9yl/YNDsBdCkjDverw=
    on:
      branch:
        - master
    skip_cleanup: true
    skip_existing: true
  - provider: pypi
    distributions: sdist bdist_wheel
    user: spulec
    password:
      secure: NxnPylnTfekJmGyoufCw0lMoYRskSMJzvAIyAlJJVYKwEhmiCPOrdy5qV8i8mRZ1AkUsqU3jBZ/PD56n96clHW0E3d080UleRDj6JpyALVdeLfMqZl9kLmZ8bqakWzYq3VSJKw2zGP/L4tPGf8wTK1SUv9yl/YNDsBdCkjDverw=
    on:
      tags: true
    skip_existing: true


@ -2012,23 +2012,23 @@
- [ ] upload_archive
- [ ] upload_multipart_part
## glue - 23% implemented
- [x] batch_create_partition
- [ ] batch_delete_connection
- [x] batch_delete_partition
- [x] batch_delete_table
- [ ] batch_delete_table_version
- [ ] batch_get_partition
- [ ] batch_stop_job_run
- [ ] create_classifier
- [ ] create_connection
- [ ] create_crawler
- [x] create_database
- [ ] create_dev_endpoint
- [ ] create_job
- [x] create_partition
- [ ] create_script
- [x] create_table
- [ ] create_trigger
- [ ] create_user_defined_function
- [ ] delete_classifier
@ -2037,8 +2037,8 @@
- [ ] delete_database
- [ ] delete_dev_endpoint
- [ ] delete_job
- [x] delete_partition
- [x] delete_table
- [ ] delete_table_version
- [ ] delete_trigger
- [ ] delete_user_defined_function
@ -2050,7 +2050,7 @@
- [ ] get_crawler
- [ ] get_crawler_metrics
- [ ] get_crawlers
- [x] get_database
- [ ] get_databases
- [ ] get_dataflow_graph
- [ ] get_dev_endpoint
@ -2060,13 +2060,13 @@
- [ ] get_job_runs
- [ ] get_jobs
- [ ] get_mapping
- [x] get_partition
- [x] get_partitions
- [ ] get_plan
- [x] get_table
- [x] get_table_version
- [x] get_table_versions
- [x] get_tables
- [ ] get_trigger
- [ ] get_triggers
- [ ] get_user_defined_function
@ -2087,8 +2087,8 @@
- [ ] update_database
- [ ] update_dev_endpoint
- [ ] update_job
- [x] update_partition
- [x] update_table
- [ ] update_trigger
- [ ] update_user_defined_function

README.md

@ -55,95 +55,95 @@ With the decorator wrapping the test, all the calls to s3 are automatically mock
It gets even better! Moto isn't just for Python code and it isn't just for S3. Look at the [standalone server mode](https://github.com/spulec/moto#stand-alone-server-mode) for more information about running Moto with other languages. Here's the status of the other AWS services implemented:
```gherkin
|----------------------------------------------------------------------------------------|
| Service Name              | Decorator             | Development Status                 |
|----------------------------------------------------------------------------------------|
| ACM                       | @mock_acm             | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| API Gateway               | @mock_apigateway      | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Autoscaling               | @mock_autoscaling     | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Cloudformation            | @mock_cloudformation  | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Cloudwatch                | @mock_cloudwatch      | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| CloudwatchEvents          | @mock_events          | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| Cognito Identity          | @mock_cognitoidentity | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| Cognito Identity Provider | @mock_cognitoidp      | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| Config                    | @mock_config          | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| Data Pipeline             | @mock_datapipeline    | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| DynamoDB                  | @mock_dynamodb        | core endpoints done                |
| DynamoDB2                 | @mock_dynamodb2       | all endpoints + partial indexes    |
|----------------------------------------------------------------------------------------|
| EC2                       | @mock_ec2             | core endpoints done                |
| - AMI                     |                       | core endpoints done                |
| - EBS                     |                       | core endpoints done                |
| - Instances               |                       | all endpoints done                 |
| - Security Groups         |                       | core endpoints done                |
| - Tags                    |                       | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| ECR                       | @mock_ecr             | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| ECS                       | @mock_ecs             | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| ELB                       | @mock_elb             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| ELBv2                     | @mock_elbv2           | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| EMR                       | @mock_emr             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Glacier                   | @mock_glacier         | core endpoints done                |
|----------------------------------------------------------------------------------------|
| IAM                       | @mock_iam             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| IoT                       | @mock_iot             | core endpoints done                |
|                           | @mock_iotdata         | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Kinesis                   | @mock_kinesis         | core endpoints done                |
|----------------------------------------------------------------------------------------|
| KMS                       | @mock_kms             | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| Lambda                    | @mock_lambda          | basic endpoints done, requires     |
|                           |                       | docker                             |
|----------------------------------------------------------------------------------------|
| Logs                      | @mock_logs            | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| Organizations             | @mock_organizations   | some core endpoints done           |
|----------------------------------------------------------------------------------------|
| Polly                     | @mock_polly           | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| RDS                       | @mock_rds             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| RDS2                      | @mock_rds2            | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Redshift                  | @mock_redshift        | core endpoints done                |
|----------------------------------------------------------------------------------------|
| Route53                   | @mock_route53         | core endpoints done                |
|----------------------------------------------------------------------------------------|
| S3                        | @mock_s3              | core endpoints done                |
|----------------------------------------------------------------------------------------|
| SecretsManager            | @mock_secretsmanager  | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| SES                       | @mock_ses             | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| SNS                       | @mock_sns             | all endpoints done                 |
|----------------------------------------------------------------------------------------|
| SQS                       | @mock_sqs             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| SSM                       | @mock_ssm             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| STS                       | @mock_sts             | core endpoints done                |
|----------------------------------------------------------------------------------------|
| SWF                       | @mock_swf             | basic endpoints done               |
|----------------------------------------------------------------------------------------|
| X-Ray                     | @mock_xray            | all endpoints done                 |
|----------------------------------------------------------------------------------------|
```
For a full list of endpoint [implementation coverage](https://github.com/spulec/moto/blob/master/IMPLEMENTATION_COVERAGE.md)
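As a quick illustration of the decorator workflow referenced above (not part of this diff; the bucket name is just an example), a test against the mocked S3 backend looks like this:

```python
import boto3
from moto import mock_s3


@mock_s3
def test_create_bucket():
    # Every S3 call inside this function hits moto's in-memory backend,
    # not real AWS.
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="my-test-bucket")

    names = [b["Name"] for b in client.list_buckets()["Buckets"]]
    assert names == ["my-test-bucket"]
```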
@ -318,3 +318,11 @@ boto3.resource(
```console
$ pip install moto
```
## Releases
Releases are done from Travis CI, fairly closely following this guide:
https://docs.travis-ci.com/user/deployment/pypi/
- Commits to the `master` branch do a dev deploy to PyPI.
- Commits to a tag do a real deploy to PyPI.


@ -17,66 +17,95 @@ with ``moto`` and its usage.
Currently implemented Services:
-------------------------------
+---------------------------+-----------------------+------------------------------------+
| Service Name              | Decorator             | Development Status                 |
+===========================+=======================+====================================+
| ACM                       | @mock_acm             | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| API Gateway               | @mock_apigateway      | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Autoscaling               | @mock_autoscaling     | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Cloudformation            | @mock_cloudformation  | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Cloudwatch                | @mock_cloudwatch      | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| CloudwatchEvents          | @mock_events          | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| Cognito Identity          | @mock_cognitoidentity | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| Cognito Identity Provider | @mock_cognitoidp      | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| Config                    | @mock_config          | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| Data Pipeline             | @mock_datapipeline    | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| DynamoDB                  | - @mock_dynamodb      | - core endpoints done              |
| DynamoDB2                 | - @mock_dynamodb2     | - core endpoints + partial indexes |
+---------------------------+-----------------------+------------------------------------+
| EC2                       | @mock_ec2             | core endpoints done                |
| - AMI                     |                       | - core endpoints done              |
| - EBS                     |                       | - core endpoints done              |
| - Instances               |                       | - all endpoints done               |
| - Security Groups         |                       | - core endpoints done              |
| - Tags                    |                       | - all endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| ECR                       | @mock_ecr             | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| ECS                       | @mock_ecs             | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| ELB                       | @mock_elb             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| ELBv2                     | @mock_elbv2           | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| EMR                       | @mock_emr             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Glacier                   | @mock_glacier         | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| IAM                       | @mock_iam             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| IoT                       | @mock_iot             | core endpoints done                |
|                           | @mock_iotdata         | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Kinesis                   | @mock_kinesis         | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| KMS                       | @mock_kms             | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| Lambda                    | @mock_lambda          | basic endpoints done,              |
|                           |                       | requires docker                    |
+---------------------------+-----------------------+------------------------------------+
| Logs                      | @mock_logs            | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| Organizations             | @mock_organizations   | some core endpoints done           |
+---------------------------+-----------------------+------------------------------------+
| Polly                     | @mock_polly           | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| RDS                       | @mock_rds             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| RDS2                      | @mock_rds2            | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Redshift                  | @mock_redshift        | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| Route53                   | @mock_route53         | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| S3                        | @mock_s3              | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| SecretsManager            | @mock_secretsmanager  | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| SES                       | @mock_ses             | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| SNS                       | @mock_sns             | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+
| SQS                       | @mock_sqs             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| SSM                       | @mock_ssm             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| STS                       | @mock_sts             | core endpoints done                |
+---------------------------+-----------------------+------------------------------------+
| SWF                       | @mock_swf             | basic endpoints done               |
+---------------------------+-----------------------+------------------------------------+
| X-Ray                     | @mock_xray            | all endpoints done                 |
+---------------------------+-----------------------+------------------------------------+


@ -3,7 +3,7 @@ import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = 'moto'
__version__ = '1.3.9'
from .acm import mock_acm  # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated  # flake8: noqa


@ -246,7 +246,8 @@ def resource_name_property_from_type(resource_type):
def generate_resource_name(resource_type, stack_name, logical_id):
    if resource_type in ["AWS::ElasticLoadBalancingV2::TargetGroup",
                         "AWS::ElasticLoadBalancingV2::LoadBalancer"]:
        # Target group names need to be less than 32 characters, so when cloudformation creates a name for you
        # it makes sure to stay under that limit
        name_prefix = '{0}-{1}'.format(stack_name, logical_id)


@ -4,6 +4,7 @@ import six
import random
import yaml
import os
import string
from cfnlint import decode, core
@ -29,7 +30,7 @@ def generate_stackset_arn(stackset_id, region_name):
def random_suffix():
    size = 12
    chars = list(range(10)) + list(string.ascii_uppercase)
    return ''.join(six.text_type(random.choice(chars)) for x in range(size))
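The fix matters because `random.choice` previously treated the literal string `'A-Z'` as a single candidate rather than the range of uppercase letters. A small illustrative snippet (not part of the diff) comparing the two candidate pools:

```python
import random
import string

size = 12

# Old pool: the ten digits plus the single 3-character string 'A-Z'.
old_chars = list(range(10)) + ['A-Z']
# New pool: the ten digits plus the 26 uppercase letters.
new_chars = list(range(10)) + list(string.ascii_uppercase)

# The old pool can emit 'A-Z' verbatim, producing suffixes longer than
# 12 characters that contain a hyphen, e.g. '3A-Z7A-Z1190'.
print(''.join(str(random.choice(old_chars)) for _ in range(size)))
# The new pool yields clean alphanumeric suffixes, e.g. '4K7Q1ZB09XM2'.
print(''.join(str(random.choice(new_chars)) for _ in range(size)))
```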


@ -275,7 +275,7 @@ GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://m
  <Label>{{ label }}</Label>
  <Datapoints>
    {% for datapoint in datapoints %}
    <member>
      {% if datapoint.sum is not none %}
      <Sum>{{ datapoint.sum }}</Sum>
      {% endif %}
@ -302,7 +302,7 @@ GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://m
      <Timestamp>{{ datapoint.timestamp }}</Timestamp>
      <Unit>{{ datapoint.unit }}</Unit>
    </member>
    {% endfor %}
  </Datapoints>
</GetMetricStatisticsResult>
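Renaming the datapoint tags to `<member>` matches what botocore expects when parsing the GetMetricStatistics response. A minimal sketch of exercising the endpoint through moto (namespace, metric name, and value are made up for illustration):

```python
from datetime import datetime, timedelta

import boto3
from moto import mock_cloudwatch


@mock_cloudwatch
def test_get_metric_statistics():
    client = boto3.client("cloudwatch", region_name="us-east-1")
    client.put_metric_data(
        Namespace="tester",
        MetricData=[{"MetricName": "metric", "Value": 1.5}],
    )
    stats = client.get_metric_statistics(
        Namespace="tester",
        MetricName="metric",
        StartTime=datetime.utcnow() - timedelta(minutes=5),
        EndTime=datetime.utcnow() + timedelta(minutes=5),
        Period=60,
        Statistics=["Sum"],
    )
    # With the corrected <member> tags the datapoints parse into this list.
    assert stats["Datapoints"][0]["Sum"] == 1.5
```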


@ -724,7 +724,7 @@ class Table(BaseModel):
            if idx_col_set.issubset(set(hash_set.attrs)):
                yield hash_set

    def scan(self, filters, limit, exclusive_start_key, filter_expression=None, index_name=None, projection_expression=None):
        results = []
        scanned_count = 0
        all_indexes = self.all_indexes()
@ -763,6 +763,14 @@
            if passes_all_conditions:
                results.append(item)

        if projection_expression:
            expressions = [x.strip() for x in projection_expression.split(',')]
            results = copy.deepcopy(results)
            for result in results:
                for attr in list(result.attrs):
                    if attr not in expressions:
                        result.attrs.pop(attr)

        results, last_evaluated_key = self._trim_results(results, limit,
                                                         exclusive_start_key, index_name)
        return results, scanned_count, last_evaluated_key
@ -962,7 +970,7 @@ class DynamoDBBackend(BaseBackend):
        return table.query(hash_key, range_comparison, range_values, limit,
                           exclusive_start_key, scan_index_forward, projection_expression, index_name, filter_expression, **filter_kwargs)

    def scan(self, table_name, filters, limit, exclusive_start_key, filter_expression, expr_names, expr_values, index_name, projection_expression):
        table = self.tables.get(table_name)
        if not table:
            return None, None, None
@ -977,7 +985,9 @@
        else:
            filter_expression = Op(None, None)  # Will always eval to true

        projection_expression = ','.join([expr_names.get(attr, attr) for attr in projection_expression.replace(' ', '').split(',')])

        return table.scan(scan_filters, limit, exclusive_start_key, filter_expression, index_name, projection_expression)

    def update_item(self, table_name, key, update_expression, attribute_updates, expression_attribute_names,
                    expression_attribute_values, expected=None):


@ -166,7 +166,7 @@ class DynamoHandler(BaseResponse):
                              when BillingMode is PAY_PER_REQUEST')
            throughput = None
        else:  # Provisioned (default billing mode)
            throughput = body.get("ProvisionedThroughput")
        # getting the schema
        key_schema = body['KeySchema']
        # getting attribute definition
@ -558,7 +558,7 @@ class DynamoHandler(BaseResponse):
        filter_expression = self.body.get('FilterExpression')
        expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
        expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
        projection_expression = self.body.get('ProjectionExpression', '')
        exclusive_start_key = self.body.get('ExclusiveStartKey')
        limit = self.body.get("Limit")
        index_name = self.body.get('IndexName')
@ -570,7 +570,8 @@ class DynamoHandler(BaseResponse):
                                                                   filter_expression,
                                                                   expression_attribute_names,
                                                                   expression_attribute_values,
                                                                   index_name,
                                                                   projection_expression)
        except InvalidIndexNameError as err:
            er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
            return self.error(er, str(err))
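With `ProjectionExpression` now threaded from the handler through `DynamoDBBackend.scan` into `Table.scan`, a scan can return only the requested attributes. A minimal sketch (table and attribute names are made up for illustration):

```python
import boto3
from moto import mock_dynamodb2


@mock_dynamodb2
def test_scan_with_projection_expression():
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "username", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "username", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    client.put_item(
        TableName="users",
        Item={"username": {"S": "alice"}, "email": {"S": "alice@example.com"}},
    )

    # Only the projected attribute should come back from the scan.
    items = client.scan(TableName="users", ProjectionExpression="username")["Items"]
    assert items == [{"username": {"S": "alice"}}]
```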


@ -332,6 +332,15 @@ class InvalidParameterValueErrorTagNull(EC2ClientError):
"Tag value cannot be null. Use empty string instead.") "Tag value cannot be null. Use empty string instead.")
class InvalidParameterValueErrorUnknownAttribute(EC2ClientError):
def __init__(self, parameter_value):
super(InvalidParameterValueErrorUnknownAttribute, self).__init__(
"InvalidParameterValue",
"Value ({0}) for parameter attribute is invalid. Unknown attribute."
.format(parameter_value))
class InvalidInternetGatewayIdError(EC2ClientError): class InvalidInternetGatewayIdError(EC2ClientError):
def __init__(self, internet_gateway_id): def __init__(self, internet_gateway_id):


@ -54,6 +54,7 @@ from .exceptions import (
    InvalidNetworkInterfaceIdError,
    InvalidParameterValueError,
    InvalidParameterValueErrorTagNull,
    InvalidParameterValueErrorUnknownAttribute,
    InvalidPermissionNotFoundError,
    InvalidPermissionDuplicateError,
    InvalidRouteTableIdError,
@ -383,6 +384,10 @@ class NetworkInterfaceBackend(object):
class Instance(TaggedEC2Resource, BotoInstance):
    VALID_ATTRIBUTES = {'instanceType', 'kernel', 'ramdisk', 'userData', 'disableApiTermination',
                        'instanceInitiatedShutdownBehavior', 'rootDeviceName', 'blockDeviceMapping',
                        'productCodes', 'sourceDestCheck', 'groupSet', 'ebsOptimized', 'sriovNetSupport'}

    def __init__(self, ec2_backend, image_id, user_data, security_groups, **kwargs):
        super(Instance, self).__init__()
        self.ec2_backend = ec2_backend
@ -405,6 +410,8 @@ class Instance(TaggedEC2Resource, BotoInstance):
        self.launch_time = utc_date_and_time()
        self.ami_launch_index = kwargs.get("ami_launch_index", 0)
        self.disable_api_termination = kwargs.get("disable_api_termination", False)
        self.instance_initiated_shutdown_behavior = kwargs.get("instance_initiated_shutdown_behavior", "stop")
        self.sriov_net_support = "simple"
        self._spot_fleet_id = kwargs.get("spot_fleet_id", None)
        associate_public_ip = kwargs.get("associate_public_ip", False)
        if in_ec2_classic:
@ -788,14 +795,22 @@ class InstanceBackend(object):
            setattr(instance, key, value)
        return instance

    def modify_instance_security_groups(self, instance_id, new_group_id_list):
        instance = self.get_instance(instance_id)
        new_group_list = []
        for new_group_id in new_group_id_list:
            new_group_list.append(self.get_security_group_from_id(new_group_id))
        setattr(instance, 'security_groups', new_group_list)
        return instance

    def describe_instance_attribute(self, instance_id, attribute):
        if attribute not in Instance.VALID_ATTRIBUTES:
            raise InvalidParameterValueErrorUnknownAttribute(attribute)

        if attribute == 'groupSet':
            key = 'security_groups'
        else:
            key = camelcase_to_underscores(attribute)
        instance = self.get_instance(instance_id)
        value = getattr(instance, key)
        return instance, value


@ -46,6 +46,7 @@ class InstanceResponse(BaseResponse):
        associate_public_ip = self._get_param('AssociatePublicIpAddress')
        key_name = self._get_param('KeyName')
        ebs_optimized = self._get_param('EbsOptimized')
        instance_initiated_shutdown_behavior = self._get_param("InstanceInitiatedShutdownBehavior")
        tags = self._parse_tag_specification("TagSpecification")
        region_name = self.region
@ -55,7 +56,7 @@ class InstanceResponse(BaseResponse):
            instance_type=instance_type, placement=placement, region_name=region_name, subnet_id=subnet_id,
            owner_id=owner_id, key_name=key_name, security_group_ids=security_group_ids,
            nics=nics, private_ip=private_ip, associate_public_ip=associate_public_ip,
            tags=tags, ebs_optimized=ebs_optimized, instance_initiated_shutdown_behavior=instance_initiated_shutdown_behavior)
        template = self.response_template(EC2_RUN_INSTANCES)
        return template.render(reservation=new_reservation)
@ -113,12 +114,11 @@ class InstanceResponse(BaseResponse):
        # TODO this and modify below should raise IncorrectInstanceState if
        # instance not in stopped state
        attribute = self._get_param('Attribute')
        instance_id = self._get_param('InstanceId')
        instance, value = self.ec2_backend.describe_instance_attribute(
            instance_id, attribute)
        if attribute == "groupSet":
            template = self.response_template(
                EC2_DESCRIBE_INSTANCE_GROUPSET_ATTRIBUTE)
        else:
@ -597,7 +597,9 @@ EC2_DESCRIBE_INSTANCE_ATTRIBUTE = """<DescribeInstanceAttributeResponse xmlns="h
    <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
    <instanceId>{{ instance.id }}</instanceId>
    <{{ attribute }}>
    {% if value is not none %}
      <value>{{ value }}</value>
    {% endif %}
    </{{ attribute }}>
</DescribeInstanceAttributeResponse>"""
@ -605,9 +607,9 @@ EC2_DESCRIBE_INSTANCE_GROUPSET_ATTRIBUTE = """<DescribeInstanceAttributeResponse
    <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
    <instanceId>{{ instance.id }}</instanceId>
    <{{ attribute }}>
      {% for sg in value %}
      <item>
        <groupId>{{ sg.id }}</groupId>
      </item>
      {% endfor %}
    </{{ attribute }}>
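These handler changes accept `InstanceInitiatedShutdownBehavior` on RunInstances and validate the attribute name on DescribeInstanceAttribute. A minimal sketch of the resulting behaviour (the AMI ID is a placeholder):

```python
import boto3
from moto import mock_ec2


@mock_ec2
def test_describe_instance_attribute():
    client = boto3.client("ec2", region_name="us-east-1")
    reservation = client.run_instances(
        ImageId="ami-12345678",  # placeholder AMI ID
        MinCount=1,
        MaxCount=1,
        InstanceInitiatedShutdownBehavior="terminate",
    )
    instance_id = reservation["Instances"][0]["InstanceId"]

    # The attribute name is now validated against Instance.VALID_ATTRIBUTES
    # and the stored shutdown behaviour is echoed back.
    attr = client.describe_instance_attribute(
        InstanceId=instance_id, Attribute="instanceInitiatedShutdownBehavior"
    )
    assert attr["InstanceInitiatedShutdownBehavior"]["Value"] == "terminate"
```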


@ -3,6 +3,7 @@ from __future__ import unicode_literals
import hashlib
import re
from copy import copy
from datetime import datetime
from random import random

from botocore.exceptions import ParamValidationError
@ -106,7 +107,7 @@ class Image(BaseObject):
        self.repository = repository
        self.registry_id = registry_id
        self.image_digest = digest
        self.image_pushed_at = str(datetime.utcnow().isoformat())

    def _create_digest(self):
        image_contents = 'docker_image{0}'.format(int(random() * 10 ** 6))
@ -158,7 +159,7 @@ class Image(BaseObject):
        response_object['repositoryName'] = self.repository
        response_object['registryId'] = self.registry_id
        response_object['imageSizeInBytes'] = self.image_size_in_bytes
        response_object['imagePushedAt'] = self.image_pushed_at
        return {k: v for k, v in response_object.items() if v is not None and v != []}

    @property
@ -402,7 +403,10 @@ class ECRBackend(BaseBackend):
                    image_found = True
                    repository.images[num].image_tag = image_id["imageTag"]
                    response["imageIds"].append(image.response_batch_delete_image)
                    if len(image.image_tags) > 1:
                        repository.images[num].remove_tag(image_id["imageTag"])
                    else:
                        repository.images.remove(image)

        if not image_found:
            failure_response = {
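With this change `imagePushedAt` is populated from the push time instead of the hard-coded `'2017-05-09'`. A minimal sketch (the image manifest content is a placeholder):

```python
import json

import boto3
from moto import mock_ecr


@mock_ecr
def test_image_pushed_at():
    client = boto3.client("ecr", region_name="us-east-1")
    client.create_repository(repositoryName="test-repo")
    client.put_image(
        repositoryName="test-repo",
        imageManifest=json.dumps({"schemaVersion": 2}),  # placeholder manifest
        imageTag="latest",
    )

    image = client.describe_images(repositoryName="test-repo")["imageDetails"][0]
    # imagePushedAt now reflects when the image was put, not a fixed date.
    assert "imagePushedAt" in image
```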


@ -138,6 +138,12 @@ class FakeTable(BaseModel):
            raise PartitionAlreadyExistsException()
        self.partitions[key] = partition

    def delete_partition(self, values):
        try:
            del self.partitions[str(values)]
        except KeyError:
            raise PartitionNotFoundException()


class FakePartition(BaseModel):
    def __init__(self, database_name, table_name, partiton_input):


@ -4,6 +4,11 @@ import json
from moto.core.responses import BaseResponse
from .models import glue_backend
from .exceptions import (
    PartitionAlreadyExistsException,
    PartitionNotFoundException,
    TableNotFoundException
)


class GlueResponse(BaseResponse):
@ -90,6 +95,28 @@ class GlueResponse(BaseResponse):
        resp = self.glue_backend.delete_table(database_name, table_name)
        return json.dumps(resp)

    def batch_delete_table(self):
        database_name = self.parameters.get('DatabaseName')

        errors = []
        for table_name in self.parameters.get('TablesToDelete'):
            try:
                self.glue_backend.delete_table(database_name, table_name)
            except TableNotFoundException:
                errors.append({
                    "TableName": table_name,
                    "ErrorDetail": {
                        "ErrorCode": "EntityNotFoundException",
                        "ErrorMessage": "Table not found"
                    }
                })

        out = {}
        if errors:
            out["Errors"] = errors

        return json.dumps(out)

    def get_partitions(self):
        database_name = self.parameters.get('DatabaseName')
        table_name = self.parameters.get('TableName')
@ -124,6 +151,30 @@ class GlueResponse(BaseResponse):
        return ""

    def batch_create_partition(self):
        database_name = self.parameters.get('DatabaseName')
        table_name = self.parameters.get('TableName')
        table = self.glue_backend.get_table(database_name, table_name)

        errors_output = []
        for part_input in self.parameters.get('PartitionInputList'):
            try:
                table.create_partition(part_input)
            except PartitionAlreadyExistsException:
                errors_output.append({
                    'PartitionValues': part_input['Values'],
                    'ErrorDetail': {
                        'ErrorCode': 'AlreadyExistsException',
                        'ErrorMessage': 'Partition already exists.'
                    }
                })

        out = {}
        if errors_output:
            out["Errors"] = errors_output

        return json.dumps(out)

    def update_partition(self):
        database_name = self.parameters.get('DatabaseName')
        table_name = self.parameters.get('TableName')
@ -134,3 +185,38 @@ class GlueResponse(BaseResponse):
        table.update_partition(part_to_update, part_input)
        return ""

    def delete_partition(self):
        database_name = self.parameters.get('DatabaseName')
        table_name = self.parameters.get('TableName')
        part_to_delete = self.parameters.get('PartitionValues')

        table = self.glue_backend.get_table(database_name, table_name)
        table.delete_partition(part_to_delete)
        return ""

    def batch_delete_partition(self):
        database_name = self.parameters.get('DatabaseName')
        table_name = self.parameters.get('TableName')
        table = self.glue_backend.get_table(database_name, table_name)

        errors_output = []
        for part_input in self.parameters.get('PartitionsToDelete'):
            values = part_input.get('Values')
            try:
                table.delete_partition(values)
            except PartitionNotFoundException:
                errors_output.append({
                    'PartitionValues': values,
                    'ErrorDetail': {
                        'ErrorCode': 'EntityNotFoundException',
                        'ErrorMessage': 'Partition not found',
                    }
                })

        out = {}
        if errors_output:
            out['Errors'] = errors_output

        return json.dumps(out)
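Together with the model change above, these handlers let the Glue batch partition APIs be exercised through boto3. A minimal sketch (database, table, and partition values are made up, and the `TableInput`/`PartitionInput` payloads are trimmed to the fields the mock needs):

```python
import boto3
from moto import mock_glue


@mock_glue
def test_batch_partition_round_trip():
    client = boto3.client("glue", region_name="us-east-1")
    client.create_database(DatabaseInput={"Name": "db"})
    client.create_table(
        DatabaseName="db",
        TableInput={
            "Name": "events",
            "PartitionKeys": [{"Name": "dt", "Type": "string"}],
        },
    )
    client.batch_create_partition(
        DatabaseName="db",
        TableName="events",
        PartitionInputList=[{"Values": ["2019-07-07"]}],
    )
    resp = client.batch_delete_partition(
        DatabaseName="db",
        TableName="events",
        PartitionsToDelete=[{"Values": ["2019-07-07"]}, {"Values": ["missing"]}],
    )
    # The partition that never existed comes back under "Errors".
    assert [e["PartitionValues"] for e in resp.get("Errors", [])] == [["missing"]]
```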

File diff suppressed because it is too large.


@ -26,6 +26,14 @@ class IAMReportNotPresentException(RESTError):
"ReportNotPresent", message) "ReportNotPresent", message)
class IAMLimitExceededException(RESTError):
code = 400
def __init__(self, message):
super(IAMLimitExceededException, self).__init__(
"LimitExceeded", message)
class MalformedCertificate(RESTError): class MalformedCertificate(RESTError):
code = 400 code = 400
@ -34,6 +42,14 @@ class MalformedCertificate(RESTError):
'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert)) 'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert))
class MalformedPolicyDocument(RESTError):
code = 400
def __init__(self, message=""):
super(MalformedPolicyDocument, self).__init__(
'MalformedPolicyDocument', message)
class DuplicateTags(RESTError): class DuplicateTags(RESTError):
code = 400 code = 400


@ -8,14 +8,14 @@ import re
from cryptography import x509
from cryptography.hazmat.backends import default_backend

from moto.core.exceptions import RESTError
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_without_milliseconds, iso_8601_datetime_with_milliseconds
from moto.iam.policy_validation import IAMPolicyDocumentValidator

from .aws_managed_policies import aws_managed_policies_data
from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, IAMLimitExceededException, \
    MalformedCertificate, DuplicateTags, TagKeyTooBig, InvalidTagCharacters, TooManyTags, TagValueTooBig
from .utils import random_access_key, random_alphanumeric, random_resource_id, random_policy_id

ACCOUNT_ID = 123456789012
@ -28,11 +28,15 @@ class MFADevice(object):
                 serial_number,
                 authentication_code_1,
                 authentication_code_2):
        self.enable_date = datetime.utcnow()
        self.serial_number = serial_number
        self.authentication_code_1 = authentication_code_1
        self.authentication_code_2 = authentication_code_2

    @property
    def enabled_iso_8601(self):
        return iso_8601_datetime_without_milliseconds(self.enable_date)


class Policy(BaseModel):
    is_attachable = False
@ -42,7 +46,9 @@ class Policy(BaseModel):
                 default_version_id=None,
                 description=None,
                 document=None,
                 path=None,
                 create_date=None,
                 update_date=None):
        self.name = name
        self.attachment_count = 0
@ -56,10 +62,25 @@ class Policy(BaseModel):
        else:
            self.default_version_id = 'v1'
            self.next_version_num = 2
        self.versions = [PolicyVersion(self.arn, document, True, self.default_version_id, update_date)]

        self.create_date = create_date if create_date is not None else datetime.utcnow()
        self.update_date = update_date if update_date is not None else datetime.utcnow()

    def update_default_version(self, new_default_version_id):
        for version in self.versions:
            if version.version_id == self.default_version_id:
                version.is_default = False
                break
        self.default_version_id = new_default_version_id

    @property
    def created_iso_8601(self):
        return iso_8601_datetime_with_milliseconds(self.create_date)

    @property
    def updated_iso_8601(self):
        return iso_8601_datetime_with_milliseconds(self.update_date)


class SAMLProvider(BaseModel):
@ -77,13 +98,19 @@ class PolicyVersion(object):
    def __init__(self,
                 policy_arn,
                 document,
                 is_default=False,
                 version_id='v1',
                 create_date=None):
        self.policy_arn = policy_arn
        self.document = document or {}
        self.is_default = is_default
        self.version_id = version_id
        self.create_date = create_date if create_date is not None else datetime.utcnow()

    @property
    def created_iso_8601(self):
        return iso_8601_datetime_with_milliseconds(self.create_date)


class ManagedPolicy(Policy):
@ -112,7 +139,9 @@ class AWSManagedPolicy(ManagedPolicy):
        return cls(name,
                   default_version_id=data.get('DefaultVersionId'),
                   path=data.get('Path'),
                   document=json.dumps(data.get('Document')),
                   create_date=datetime.strptime(data.get('CreateDate'), "%Y-%m-%dT%H:%M:%S+00:00"),
                   update_date=datetime.strptime(data.get('UpdateDate'), "%Y-%m-%dT%H:%M:%S+00:00"))

    @property
    def arn(self):
@ -139,11 +168,15 @@ class Role(BaseModel):
        self.path = path or '/'
        self.policies = {}
        self.managed_policies = {}
        self.create_date = datetime.utcnow()
        self.tags = {}
        self.description = ""
        self.permissions_boundary = permissions_boundary

    @property
    def created_iso_8601(self):
        return iso_8601_datetime_with_milliseconds(self.create_date)

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        properties = cloudformation_json['Properties']
@ -198,7 +231,11 @@ class InstanceProfile(BaseModel):
        self.name = name
        self.path = path or '/'
        self.roles = roles if roles else []
        self.create_date = datetime.utcnow()

    @property
    def created_iso_8601(self):
        return iso_8601_datetime_with_milliseconds(self.create_date)

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@ -250,25 +287,31 @@ class SigningCertificate(BaseModel):
        self.id = id
        self.user_name = user_name
        self.body = body
        self.upload_date = datetime.utcnow()
        self.status = 'Active'

    @property
    def uploaded_iso_8601(self):
        return iso_8601_datetime_without_milliseconds(self.upload_date)


class AccessKey(BaseModel):

    def __init__(self, user_name):
        self.user_name = user_name
        self.access_key_id = "AKIA" + random_access_key()
        self.secret_access_key = random_alphanumeric(40)
        self.status = 'Active'
        self.create_date = datetime.utcnow()
        self.last_used = datetime.utcnow()

    @property
    def created_iso_8601(self):
        return iso_8601_datetime_without_milliseconds(self.create_date)

    @property
    def last_used_iso_8601(self):
        return iso_8601_datetime_without_milliseconds(self.last_used)

    def get_cfn_attribute(self, attribute_name):
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@ -283,15 +326,16 @@ class Group(BaseModel):
self.name = name self.name = name
self.id = random_resource_id() self.id = random_resource_id()
self.path = path self.path = path
self.created = datetime.strftime( self.create_date = datetime.utcnow()
datetime.utcnow(),
"%Y-%m-%d-%H-%M-%S"
)
self.users = [] self.users = []
self.managed_policies = {} self.managed_policies = {}
self.policies = {} self.policies = {}
@property
def created_iso_8601(self):
return iso_8601_datetime_with_milliseconds(self.create_date)
def get_cfn_attribute(self, attribute_name): def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'Arn': if attribute_name == 'Arn':
@ -306,10 +350,6 @@ class Group(BaseModel):
else: else:
return "arn:aws:iam::{0}:group/{1}/{2}".format(ACCOUNT_ID, self.path, self.name) return "arn:aws:iam::{0}:group/{1}/{2}".format(ACCOUNT_ID, self.path, self.name)
@property
def create_date(self):
return self.created
def get_policy(self, policy_name): def get_policy(self, policy_name):
try: try:
policy_json = self.policies[policy_name] policy_json = self.policies[policy_name]
@ -335,7 +375,7 @@ class User(BaseModel):
self.name = name self.name = name
self.id = random_resource_id() self.id = random_resource_id()
self.path = path if path else "/" self.path = path if path else "/"
self.created = datetime.utcnow() self.create_date = datetime.utcnow()
self.mfa_devices = {} self.mfa_devices = {}
self.policies = {} self.policies = {}
self.managed_policies = {} self.managed_policies = {}
@ -350,7 +390,7 @@ class User(BaseModel):
@property @property
def created_iso_8601(self): def created_iso_8601(self):
return iso_8601_datetime_without_milliseconds(self.created) return iso_8601_datetime_with_milliseconds(self.create_date)
def get_policy(self, policy_name): def get_policy(self, policy_name):
policy_json = None policy_json = None
@ -421,7 +461,7 @@ class User(BaseModel):
def to_csv(self): def to_csv(self):
date_format = '%Y-%m-%dT%H:%M:%S+00:00' date_format = '%Y-%m-%dT%H:%M:%S+00:00'
date_created = self.created date_created = self.create_date
# aagrawal,arn:aws:iam::509284790694:user/aagrawal,2014-09-01T22:28:48+00:00,true,2014-11-12T23:36:49+00:00,2014-09-03T18:59:00+00:00,N/A,false,true,2014-09-01T22:28:48+00:00,false,N/A,false,N/A,false,N/A # aagrawal,arn:aws:iam::509284790694:user/aagrawal,2014-09-01T22:28:48+00:00,true,2014-11-12T23:36:49+00:00,2014-09-03T18:59:00+00:00,N/A,false,true,2014-09-01T22:28:48+00:00,false,N/A,false,N/A,false,N/A
if not self.password: if not self.password:
password_enabled = 'false' password_enabled = 'false'
@ -478,7 +518,7 @@ class IAMBackend(BaseBackend):
super(IAMBackend, self).__init__() super(IAMBackend, self).__init__()
def _init_managed_policies(self): def _init_managed_policies(self):
return dict((p.name, p) for p in aws_managed_policies) return dict((p.arn, p) for p in aws_managed_policies)
def attach_role_policy(self, policy_arn, role_name): def attach_role_policy(self, policy_arn, role_name):
arns = dict((p.arn, p) for p in self.managed_policies.values()) arns = dict((p.arn, p) for p in self.managed_policies.values())
@ -536,6 +576,9 @@ class IAMBackend(BaseBackend):
policy.detach_from(self.get_user(user_name)) policy.detach_from(self.get_user(user_name))
def create_policy(self, description, path, policy_document, policy_name): def create_policy(self, description, path, policy_document, policy_name):
iam_policy_document_validator = IAMPolicyDocumentValidator(policy_document)
iam_policy_document_validator.validate()
policy = ManagedPolicy( policy = ManagedPolicy(
policy_name, policy_name,
description=description, description=description,
@ -628,6 +671,9 @@ class IAMBackend(BaseBackend):
def put_role_policy(self, role_name, policy_name, policy_json): def put_role_policy(self, role_name, policy_name, policy_json):
role = self.get_role(role_name) role = self.get_role(role_name)
iam_policy_document_validator = IAMPolicyDocumentValidator(policy_json)
iam_policy_document_validator.validate()
role.put_policy(policy_name, policy_json) role.put_policy(policy_name, policy_json)
def delete_role_policy(self, role_name, policy_name): def delete_role_policy(self, role_name, policy_name):
@ -639,6 +685,7 @@ class IAMBackend(BaseBackend):
for p, d in role.policies.items(): for p, d in role.policies.items():
if p == policy_name: if p == policy_name:
return p, d return p, d
raise IAMNotFoundException("Policy Document {0} not attached to role {1}".format(policy_name, role_name))
def list_role_policies(self, role_name): def list_role_policies(self, role_name):
role = self.get_role(role_name) role = self.get_role(role_name)
@ -725,15 +772,21 @@ class IAMBackend(BaseBackend):
role.tags.pop(ref_key, None) role.tags.pop(ref_key, None)
def create_policy_version(self, policy_arn, policy_document, set_as_default): def create_policy_version(self, policy_arn, policy_document, set_as_default):
iam_policy_document_validator = IAMPolicyDocumentValidator(policy_document)
iam_policy_document_validator.validate()
policy = self.get_policy(policy_arn) policy = self.get_policy(policy_arn)
if not policy: if not policy:
raise IAMNotFoundException("Policy not found") raise IAMNotFoundException("Policy not found")
if len(policy.versions) >= 5:
raise IAMLimitExceededException("A managed policy can have up to 5 versions. Before you create a new version, you must delete an existing version.")
set_as_default = (set_as_default == "true") # convert it to python bool
version = PolicyVersion(policy_arn, policy_document, set_as_default) version = PolicyVersion(policy_arn, policy_document, set_as_default)
policy.versions.append(version) policy.versions.append(version)
version.version_id = 'v{0}'.format(policy.next_version_num) version.version_id = 'v{0}'.format(policy.next_version_num)
policy.next_version_num += 1 policy.next_version_num += 1
if set_as_default: if set_as_default:
policy.default_version_id = version.version_id policy.update_default_version(version.version_id)
return version return version
def get_policy_version(self, policy_arn, version_id): def get_policy_version(self, policy_arn, version_id):
@ -756,8 +809,8 @@ class IAMBackend(BaseBackend):
if not policy: if not policy:
raise IAMNotFoundException("Policy not found") raise IAMNotFoundException("Policy not found")
if version_id == policy.default_version_id: if version_id == policy.default_version_id:
raise IAMConflictException( raise IAMConflictException(code="DeleteConflict",
"Cannot delete the default version of a policy") message="Cannot delete the default version of a policy.")
for i, v in enumerate(policy.versions): for i, v in enumerate(policy.versions):
if v.version_id == version_id: if v.version_id == version_id:
del policy.versions[i] del policy.versions[i]
@ -869,6 +922,9 @@ class IAMBackend(BaseBackend):
def put_group_policy(self, group_name, policy_name, policy_json): def put_group_policy(self, group_name, policy_name, policy_json):
group = self.get_group(group_name) group = self.get_group(group_name)
iam_policy_document_validator = IAMPolicyDocumentValidator(policy_json)
iam_policy_document_validator.validate()
group.put_policy(policy_name, policy_json) group.put_policy(policy_name, policy_json)
def list_group_policies(self, group_name, marker=None, max_items=None): def list_group_policies(self, group_name, marker=None, max_items=None):
@ -1029,6 +1085,9 @@ class IAMBackend(BaseBackend):
def put_user_policy(self, user_name, policy_name, policy_json): def put_user_policy(self, user_name, policy_name, policy_json):
user = self.get_user(user_name) user = self.get_user(user_name)
iam_policy_document_validator = IAMPolicyDocumentValidator(policy_json)
iam_policy_document_validator.validate()
user.put_policy(policy_name, policy_json) user.put_policy(policy_name, policy_json)
def delete_user_policy(self, user_name, policy_name): def delete_user_policy(self, user_name, policy_name):
@ -1050,7 +1109,7 @@ class IAMBackend(BaseBackend):
if key.access_key_id == access_key_id: if key.access_key_id == access_key_id:
return { return {
'user_name': key.user_name, 'user_name': key.user_name,
'last_used': key.last_used 'last_used': key.last_used_iso_8601,
} }
else: else:
raise IAMNotFoundException( raise IAMNotFoundException(
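A recurring change in the IAM models above is that pre-formatted date strings (built with strftime at construction time) are replaced by stored datetime.utcnow() values plus created_iso_8601 / last_used_iso_8601 properties that the response templates render. A minimal sketch of that pattern, with a hypothetical formatter standing in for the moto.core.utils helpers (their exact output format is assumed here):

from datetime import datetime

def iso_8601_with_milliseconds(dt):
    # hypothetical stand-in for moto.core.utils.iso_8601_datetime_with_milliseconds
    return dt.strftime("%Y-%m-%dT%H:%M:%S.") + "{:03d}Z".format(dt.microsecond // 1000)

class AccessKeySketch(object):
    """Hypothetical model showing the pattern: store a datetime, format only when rendering."""

    def __init__(self, user_name):
        self.user_name = user_name
        self.create_date = datetime.utcnow()  # a datetime object, not a pre-formatted string

    @property
    def created_iso_8601(self):
        # the response templates render this property instead of calling strftime themselves
        return iso_8601_with_milliseconds(self.create_date)

key = AccessKeySketch("alice")
print(key.created_iso_8601)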

View File

@ -0,0 +1,450 @@
import json
import re
from six import string_types
from moto.iam.exceptions import MalformedPolicyDocument
VALID_TOP_ELEMENTS = [
"Version",
"Id",
"Statement",
"Conditions"
]
VALID_VERSIONS = [
"2008-10-17",
"2012-10-17"
]
VALID_STATEMENT_ELEMENTS = [
"Sid",
"Action",
"NotAction",
"Resource",
"NotResource",
"Effect",
"Condition"
]
VALID_EFFECTS = [
"Allow",
"Deny"
]
VALID_CONDITIONS = [
"StringEquals",
"StringNotEquals",
"StringEqualsIgnoreCase",
"StringNotEqualsIgnoreCase",
"StringLike",
"StringNotLike",
"NumericEquals",
"NumericNotEquals",
"NumericLessThan",
"NumericLessThanEquals",
"NumericGreaterThan",
"NumericGreaterThanEquals",
"DateEquals",
"DateNotEquals",
"DateLessThan",
"DateLessThanEquals",
"DateGreaterThan",
"DateGreaterThanEquals",
"Bool",
"BinaryEquals",
"IpAddress",
"NotIpAddress",
"ArnEquals",
"ArnLike",
"ArnNotEquals",
"ArnNotLike",
"Null"
]
VALID_CONDITION_PREFIXES = [
"ForAnyValue:",
"ForAllValues:"
]
VALID_CONDITION_POSTFIXES = [
"IfExists"
]
SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS = {
"iam": 'IAM resource {resource} cannot contain region information.',
"s3": 'Resource {resource} can not contain region information.'
}
VALID_RESOURCE_PATH_STARTING_VALUES = {
"iam": {
"values": ["user/", "federated-user/", "role/", "group/", "instance-profile/", "mfa/", "server-certificate/",
"policy/", "sms-mfa/", "saml-provider/", "oidc-provider/", "report/", "access-report/"],
"error_message": 'IAM resource path must either be "*" or start with {values}.'
}
}
class IAMPolicyDocumentValidator:
def __init__(self, policy_document):
self._policy_document = policy_document
self._policy_json = {}
self._statements = []
self._resource_error = "" # the first resource error found that does not generate a legacy parsing error
def validate(self):
try:
self._validate_syntax()
except Exception:
raise MalformedPolicyDocument("Syntax errors in policy.")
try:
self._validate_version()
except Exception:
raise MalformedPolicyDocument("Policy document must be version 2012-10-17 or greater.")
try:
self._perform_first_legacy_parsing()
self._validate_resources_for_formats()
self._validate_not_resources_for_formats()
except Exception:
raise MalformedPolicyDocument("The policy failed legacy parsing")
try:
self._validate_sid_uniqueness()
except Exception:
raise MalformedPolicyDocument("Statement IDs (SID) in a single policy must be unique.")
try:
self._validate_action_like_exist()
except Exception:
raise MalformedPolicyDocument("Policy statement must contain actions.")
try:
self._validate_resource_exist()
except Exception:
raise MalformedPolicyDocument("Policy statement must contain resources.")
if self._resource_error != "":
raise MalformedPolicyDocument(self._resource_error)
self._validate_actions_for_prefixes()
self._validate_not_actions_for_prefixes()
def _validate_syntax(self):
self._policy_json = json.loads(self._policy_document)
assert isinstance(self._policy_json, dict)
self._validate_top_elements()
self._validate_version_syntax()
self._validate_id_syntax()
self._validate_statements_syntax()
def _validate_top_elements(self):
top_elements = self._policy_json.keys()
for element in top_elements:
assert element in VALID_TOP_ELEMENTS
def _validate_version_syntax(self):
if "Version" in self._policy_json:
assert self._policy_json["Version"] in VALID_VERSIONS
def _validate_version(self):
assert self._policy_json["Version"] == "2012-10-17"
def _validate_sid_uniqueness(self):
sids = []
for statement in self._statements:
if "Sid" in statement:
assert statement["Sid"] not in sids
sids.append(statement["Sid"])
def _validate_statements_syntax(self):
assert "Statement" in self._policy_json
assert isinstance(self._policy_json["Statement"], (dict, list))
if isinstance(self._policy_json["Statement"], dict):
self._statements.append(self._policy_json["Statement"])
else:
self._statements += self._policy_json["Statement"]
assert self._statements
for statement in self._statements:
self._validate_statement_syntax(statement)
@staticmethod
def _validate_statement_syntax(statement):
assert isinstance(statement, dict)
for statement_element in statement.keys():
assert statement_element in VALID_STATEMENT_ELEMENTS
assert ("Resource" not in statement or "NotResource" not in statement)
assert ("Action" not in statement or "NotAction" not in statement)
IAMPolicyDocumentValidator._validate_effect_syntax(statement)
IAMPolicyDocumentValidator._validate_action_syntax(statement)
IAMPolicyDocumentValidator._validate_not_action_syntax(statement)
IAMPolicyDocumentValidator._validate_resource_syntax(statement)
IAMPolicyDocumentValidator._validate_not_resource_syntax(statement)
IAMPolicyDocumentValidator._validate_condition_syntax(statement)
IAMPolicyDocumentValidator._validate_sid_syntax(statement)
@staticmethod
def _validate_effect_syntax(statement):
assert "Effect" in statement
assert isinstance(statement["Effect"], string_types)
assert statement["Effect"].lower() in [allowed_effect.lower() for allowed_effect in VALID_EFFECTS]
@staticmethod
def _validate_action_syntax(statement):
IAMPolicyDocumentValidator._validate_string_or_list_of_strings_syntax(statement, "Action")
@staticmethod
def _validate_not_action_syntax(statement):
IAMPolicyDocumentValidator._validate_string_or_list_of_strings_syntax(statement, "NotAction")
@staticmethod
def _validate_resource_syntax(statement):
IAMPolicyDocumentValidator._validate_string_or_list_of_strings_syntax(statement, "Resource")
@staticmethod
def _validate_not_resource_syntax(statement):
IAMPolicyDocumentValidator._validate_string_or_list_of_strings_syntax(statement, "NotResource")
@staticmethod
def _validate_string_or_list_of_strings_syntax(statement, key):
if key in statement:
assert isinstance(statement[key], (string_types, list))
if isinstance(statement[key], list):
for resource in statement[key]:
assert isinstance(resource, string_types)
@staticmethod
def _validate_condition_syntax(statement):
if "Condition" in statement:
assert isinstance(statement["Condition"], dict)
for condition_key, condition_value in statement["Condition"].items():
assert isinstance(condition_value, dict)
for condition_element_key, condition_element_value in condition_value.items():
assert isinstance(condition_element_value, (list, string_types))
if IAMPolicyDocumentValidator._strip_condition_key(condition_key) not in VALID_CONDITIONS:
assert not condition_value # empty dict
@staticmethod
def _strip_condition_key(condition_key):
for valid_prefix in VALID_CONDITION_PREFIXES:
if condition_key.startswith(valid_prefix):
condition_key = condition_key[len(valid_prefix):]
break # strip only the first match
for valid_postfix in VALID_CONDITION_POSTFIXES:
if condition_key.endswith(valid_postfix):
condition_key = condition_key[:-len(valid_postfix)]
break # strip only the first match
return condition_key
@staticmethod
def _validate_sid_syntax(statement):
if "Sid" in statement:
assert isinstance(statement["Sid"], string_types)
def _validate_id_syntax(self):
if "Id" in self._policy_json:
assert isinstance(self._policy_json["Id"], string_types)
def _validate_resource_exist(self):
for statement in self._statements:
assert ("Resource" in statement or "NotResource" in statement)
if "Resource" in statement and isinstance(statement["Resource"], list):
assert statement["Resource"]
elif "NotResource" in statement and isinstance(statement["NotResource"], list):
assert statement["NotResource"]
def _validate_action_like_exist(self):
for statement in self._statements:
assert ("Action" in statement or "NotAction" in statement)
if "Action" in statement and isinstance(statement["Action"], list):
assert statement["Action"]
elif "NotAction" in statement and isinstance(statement["NotAction"], list):
assert statement["NotAction"]
def _validate_actions_for_prefixes(self):
self._validate_action_like_for_prefixes("Action")
def _validate_not_actions_for_prefixes(self):
self._validate_action_like_for_prefixes("NotAction")
def _validate_action_like_for_prefixes(self, key):
for statement in self._statements:
if key in statement:
if isinstance(statement[key], string_types):
self._validate_action_prefix(statement[key])
else:
for action in statement[key]:
self._validate_action_prefix(action)
@staticmethod
def _validate_action_prefix(action):
action_parts = action.split(":")
if len(action_parts) == 1 and action_parts[0] != "*":
raise MalformedPolicyDocument("Actions/Conditions must be prefaced by a vendor, e.g., iam, sdb, ec2, etc.")
elif len(action_parts) > 2:
raise MalformedPolicyDocument("Actions/Condition can contain only one colon.")
vendor_pattern = re.compile(r'[^a-zA-Z0-9\-.]')
if action_parts[0] != "*" and vendor_pattern.search(action_parts[0]):
raise MalformedPolicyDocument("Vendor {vendor} is not valid".format(vendor=action_parts[0]))
def _validate_resources_for_formats(self):
self._validate_resource_like_for_formats("Resource")
def _validate_not_resources_for_formats(self):
self._validate_resource_like_for_formats("NotResource")
def _validate_resource_like_for_formats(self, key):
for statement in self._statements:
if key in statement:
if isinstance(statement[key], string_types):
self._validate_resource_format(statement[key])
else:
for resource in sorted(statement[key], reverse=True):
self._validate_resource_format(resource)
if self._resource_error == "":
IAMPolicyDocumentValidator._legacy_parse_resource_like(statement, key)
def _validate_resource_format(self, resource):
if resource != "*":
resource_partitions = resource.partition(":")
if resource_partitions[1] == "":
self._resource_error = 'Resource {resource} must be in ARN format or "*".'.format(resource=resource)
return
resource_partitions = resource_partitions[2].partition(":")
if resource_partitions[0] != "aws":
remaining_resource_parts = resource_partitions[2].split(":")
arn1 = remaining_resource_parts[0] if remaining_resource_parts[0] != "" or len(remaining_resource_parts) > 1 else "*"
arn2 = remaining_resource_parts[1] if len(remaining_resource_parts) > 1 else "*"
arn3 = remaining_resource_parts[2] if len(remaining_resource_parts) > 2 else "*"
arn4 = ":".join(remaining_resource_parts[3:]) if len(remaining_resource_parts) > 3 else "*"
self._resource_error = 'Partition "{partition}" is not valid for resource "arn:{partition}:{arn1}:{arn2}:{arn3}:{arn4}".'.format(
partition=resource_partitions[0],
arn1=arn1,
arn2=arn2,
arn3=arn3,
arn4=arn4
)
return
if resource_partitions[1] != ":":
self._resource_error = "Resource vendor must be fully qualified and cannot contain regexes."
return
resource_partitions = resource_partitions[2].partition(":")
service = resource_partitions[0]
if service in SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS.keys() and not resource_partitions[2].startswith(":"):
self._resource_error = SERVICE_TYPE_REGION_INFORMATION_ERROR_ASSOCIATIONS[service].format(resource=resource)
return
resource_partitions = resource_partitions[2].partition(":")
resource_partitions = resource_partitions[2].partition(":")
if service in VALID_RESOURCE_PATH_STARTING_VALUES.keys():
valid_start = False
for valid_starting_value in VALID_RESOURCE_PATH_STARTING_VALUES[service]["values"]:
if resource_partitions[2].startswith(valid_starting_value):
valid_start = True
break
if not valid_start:
self._resource_error = VALID_RESOURCE_PATH_STARTING_VALUES[service]["error_message"].format(
values=", ".join(VALID_RESOURCE_PATH_STARTING_VALUES[service]["values"])
)
def _perform_first_legacy_parsing(self):
"""This method excludes legacy parsing resources, since that have to be done later."""
for statement in self._statements:
self._legacy_parse_statement(statement)
@staticmethod
def _legacy_parse_statement(statement):
assert statement["Effect"] in VALID_EFFECTS # case-sensitive matching
if "Condition" in statement:
for condition_key, condition_value in statement["Condition"].items():
IAMPolicyDocumentValidator._legacy_parse_condition(condition_key, condition_value)
@staticmethod
def _legacy_parse_resource_like(statement, key):
if isinstance(statement[key], string_types):
if statement[key] != "*":
assert statement[key].count(":") >= 5 or "::" not in statement[key]
assert statement[key].split(":")[2] != ""
else: # list
for resource in statement[key]:
if resource != "*":
assert resource.count(":") >= 5 or "::" not in resource
assert resource[2] != ""
@staticmethod
def _legacy_parse_condition(condition_key, condition_value):
stripped_condition_key = IAMPolicyDocumentValidator._strip_condition_key(condition_key)
if stripped_condition_key.startswith("Date"):
for condition_element_key, condition_element_value in condition_value.items():
if isinstance(condition_element_value, string_types):
IAMPolicyDocumentValidator._legacy_parse_date_condition_value(condition_element_value)
else: # it has to be a list
for date_condition_value in condition_element_value:
IAMPolicyDocumentValidator._legacy_parse_date_condition_value(date_condition_value)
@staticmethod
def _legacy_parse_date_condition_value(date_condition_value):
if "t" in date_condition_value.lower() or "-" in date_condition_value:
IAMPolicyDocumentValidator._validate_iso_8601_datetime(date_condition_value.lower())
else: # timestamp
assert 0 <= int(date_condition_value) <= 9223372036854775807
@staticmethod
def _validate_iso_8601_datetime(datetime):
datetime_parts = datetime.partition("t")
negative_year = datetime_parts[0].startswith("-")
date_parts = datetime_parts[0][1:].split("-") if negative_year else datetime_parts[0].split("-")
year = "-" + date_parts[0] if negative_year else date_parts[0]
assert -292275054 <= int(year) <= 292278993
if len(date_parts) > 1:
month = date_parts[1]
assert 1 <= int(month) <= 12
if len(date_parts) > 2:
day = date_parts[2]
assert 1 <= int(day) <= 31
assert len(date_parts) < 4
time_parts = datetime_parts[2].split(":")
if time_parts[0] != "":
hours = time_parts[0]
assert 0 <= int(hours) <= 23
if len(time_parts) > 1:
minutes = time_parts[1]
assert 0 <= int(minutes) <= 59
if len(time_parts) > 2:
if "z" in time_parts[2]:
seconds_with_decimal_fraction = time_parts[2].partition("z")[0]
assert time_parts[2].partition("z")[2] == ""
elif "+" in time_parts[2]:
seconds_with_decimal_fraction = time_parts[2].partition("+")[0]
time_zone_data = time_parts[2].partition("+")[2].partition(":")
time_zone_hours = time_zone_data[0]
assert len(time_zone_hours) == 2
assert 0 <= int(time_zone_hours) <= 23
if time_zone_data[1] == ":":
time_zone_minutes = time_zone_data[2]
assert len(time_zone_minutes) == 2
assert 0 <= int(time_zone_minutes) <= 59
else:
seconds_with_decimal_fraction = time_parts[2]
seconds_with_decimal_fraction_partition = seconds_with_decimal_fraction.partition(".")
seconds = seconds_with_decimal_fraction_partition[0]
assert 0 <= int(seconds) <= 59
if seconds_with_decimal_fraction_partition[1] == ".":
decimal_seconds = seconds_with_decimal_fraction_partition[2]
assert 0 <= int(decimal_seconds) <= 999999999
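The validator above is wired into create_policy, put_role_policy, put_group_policy, put_user_policy, and create_policy_version, so malformed documents are rejected before anything is stored. A hedged usage sketch through boto3 against the mock; the message comes from the validator above, while the exact error code is an assumption about how moto surfaces MalformedPolicyDocument:

import json

import boto3
from botocore.exceptions import ClientError
from moto import mock_iam

@mock_iam
def create_policy_rejects_malformed_document():
    client = boto3.client("iam", region_name="us-east-1")
    bad_document = json.dumps({"Version": "2012-10-17"})  # no Statement element at all
    try:
        client.create_policy(PolicyName="broken-policy", PolicyDocument=bad_document)
    except ClientError as err:
        print(err.response["Error"]["Code"])     # expected: MalformedPolicyDocument
        print(err.response["Error"]["Message"])  # expected: "Syntax errors in policy."

create_policy_rejects_malformed_document()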

View File

@ -818,12 +818,12 @@ CREATE_POLICY_TEMPLATE = """<CreatePolicyResponse>
<Policy> <Policy>
<Arn>{{ policy.arn }}</Arn> <Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount> <AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate> <CreateDate>{{ policy.created_iso_8601 }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId> <DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path> <Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId> <PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName> <PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate> <UpdateDate>{{ policy.updated_iso_8601 }}</UpdateDate>
</Policy> </Policy>
</CreatePolicyResult> </CreatePolicyResult>
<ResponseMetadata> <ResponseMetadata>
@ -841,8 +841,8 @@ GET_POLICY_TEMPLATE = """<GetPolicyResponse>
<Path>{{ policy.path }}</Path> <Path>{{ policy.path }}</Path>
<Arn>{{ policy.arn }}</Arn> <Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount> <AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate> <CreateDate>{{ policy.created_iso_8601 }}</CreateDate>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate> <UpdateDate>{{ policy.updated_iso_8601 }}</UpdateDate>
</Policy> </Policy>
</GetPolicyResult> </GetPolicyResult>
<ResponseMetadata> <ResponseMetadata>
@ -929,12 +929,12 @@ LIST_POLICIES_TEMPLATE = """<ListPoliciesResponse>
<member> <member>
<Arn>{{ policy.arn }}</Arn> <Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount> <AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate> <CreateDate>{{ policy.created_iso_8601 }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId> <DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path> <Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId> <PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName> <PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate> <UpdateDate>{{ policy.updated_iso_8601 }}</UpdateDate>
</member> </member>
{% endfor %} {% endfor %}
</Policies> </Policies>
@ -958,7 +958,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """<CreateInstanceProfileResponse xmlns="http
<InstanceProfileName>{{ profile.name }}</InstanceProfileName> <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path> <Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn> <Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate> <CreateDate>{{ profile.created_iso_8601 }}</CreateDate>
</InstanceProfile> </InstanceProfile>
</CreateInstanceProfileResult> </CreateInstanceProfileResult>
<ResponseMetadata> <ResponseMetadata>
@ -977,7 +977,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
</member> </member>
{% endfor %} {% endfor %}
@ -985,7 +985,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
<InstanceProfileName>{{ profile.name }}</InstanceProfileName> <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path> <Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn> <Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate> <CreateDate>{{ profile.created_iso_8601 }}</CreateDate>
</InstanceProfile> </InstanceProfile>
</GetInstanceProfileResult> </GetInstanceProfileResult>
<ResponseMetadata> <ResponseMetadata>
@ -1000,7 +1000,7 @@ CREATE_ROLE_TEMPLATE = """<CreateRoleResponse xmlns="https://iam.amazonaws.com/d
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
{% if role.permissions_boundary %} {% if role.permissions_boundary %}
<PermissionsBoundary> <PermissionsBoundary>
@ -1041,7 +1041,7 @@ UPDATE_ROLE_DESCRIPTION_TEMPLATE = """<UpdateRoleDescriptionResponse xmlns="http
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date.isoformat() }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
{% if role.tags %} {% if role.tags %}
<Tags> <Tags>
@ -1067,7 +1067,7 @@ GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/201
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
{% if role.tags %} {% if role.tags %}
<Tags> <Tags>
@ -1108,7 +1108,7 @@ LIST_ROLES_TEMPLATE = """<ListRolesResponse xmlns="https://iam.amazonaws.com/doc
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
{% if role.permissions_boundary %} {% if role.permissions_boundary %}
<PermissionsBoundary> <PermissionsBoundary>
@ -1144,8 +1144,8 @@ CREATE_POLICY_VERSION_TEMPLATE = """<CreatePolicyVersionResponse xmlns="https://
<PolicyVersion> <PolicyVersion>
<Document>{{ policy_version.document }}</Document> <Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId> <VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion> <IsDefaultVersion>{{ policy_version.is_default | lower }}</IsDefaultVersion>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate> <CreateDate>{{ policy_version.created_iso_8601 }}</CreateDate>
</PolicyVersion> </PolicyVersion>
</CreatePolicyVersionResult> </CreatePolicyVersionResult>
<ResponseMetadata> <ResponseMetadata>
@ -1158,8 +1158,8 @@ GET_POLICY_VERSION_TEMPLATE = """<GetPolicyVersionResponse xmlns="https://iam.am
<PolicyVersion> <PolicyVersion>
<Document>{{ policy_version.document }}</Document> <Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId> <VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion> <IsDefaultVersion>{{ policy_version.is_default | lower }}</IsDefaultVersion>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate> <CreateDate>{{ policy_version.created_iso_8601 }}</CreateDate>
</PolicyVersion> </PolicyVersion>
</GetPolicyVersionResult> </GetPolicyVersionResult>
<ResponseMetadata> <ResponseMetadata>
@ -1175,8 +1175,8 @@ LIST_POLICY_VERSIONS_TEMPLATE = """<ListPolicyVersionsResponse xmlns="https://ia
<member> <member>
<Document>{{ policy_version.document }}</Document> <Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId> <VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion> <IsDefaultVersion>{{ policy_version.is_default | lower }}</IsDefaultVersion>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate> <CreateDate>{{ policy_version.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</Versions> </Versions>
@ -1200,7 +1200,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
</member> </member>
{% endfor %} {% endfor %}
@ -1208,7 +1208,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
<InstanceProfileName>{{ instance.name }}</InstanceProfileName> <InstanceProfileName>{{ instance.name }}</InstanceProfileName>
<Path>{{ instance.path }}</Path> <Path>{{ instance.path }}</Path>
<Arn>{{ instance.arn }}</Arn> <Arn>{{ instance.arn }}</Arn>
<CreateDate>{{ instance.create_date }}</CreateDate> <CreateDate>{{ instance.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</InstanceProfiles> </InstanceProfiles>
@ -1287,7 +1287,7 @@ CREATE_GROUP_TEMPLATE = """<CreateGroupResponse>
<GroupName>{{ group.name }}</GroupName> <GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId> <GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn> <Arn>{{ group.arn }}</Arn>
<CreateDate>{{ group.create_date }}</CreateDate> <CreateDate>{{ group.created_iso_8601 }}</CreateDate>
</Group> </Group>
</CreateGroupResult> </CreateGroupResult>
<ResponseMetadata> <ResponseMetadata>
@ -1302,7 +1302,7 @@ GET_GROUP_TEMPLATE = """<GetGroupResponse>
<GroupName>{{ group.name }}</GroupName> <GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId> <GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn> <Arn>{{ group.arn }}</Arn>
<CreateDate>{{ group.create_date }}</CreateDate> <CreateDate>{{ group.created_iso_8601 }}</CreateDate>
</Group> </Group>
<Users> <Users>
{% for user in group.users %} {% for user in group.users %}
@ -1349,6 +1349,7 @@ LIST_GROUPS_FOR_USER_TEMPLATE = """<ListGroupsForUserResponse>
<GroupName>{{ group.name }}</GroupName> <GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId> <GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn> <Arn>{{ group.arn }}</Arn>
<CreateDate>{{ group.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</Groups> </Groups>
@ -1493,6 +1494,7 @@ CREATE_ACCESS_KEY_TEMPLATE = """<CreateAccessKeyResponse>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId> <AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status> <Status>{{ key.status }}</Status>
<SecretAccessKey>{{ key.secret_access_key }}</SecretAccessKey> <SecretAccessKey>{{ key.secret_access_key }}</SecretAccessKey>
<CreateDate>{{ key.created_iso_8601 }}</CreateDate>
</AccessKey> </AccessKey>
</CreateAccessKeyResult> </CreateAccessKeyResult>
<ResponseMetadata> <ResponseMetadata>
@ -1509,7 +1511,7 @@ LIST_ACCESS_KEYS_TEMPLATE = """<ListAccessKeysResponse>
<UserName>{{ user_name }}</UserName> <UserName>{{ user_name }}</UserName>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId> <AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status> <Status>{{ key.status }}</Status>
<CreateDate>{{ key.create_date }}</CreateDate> <CreateDate>{{ key.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</AccessKeyMetadata> </AccessKeyMetadata>
@ -1577,7 +1579,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
</member> </member>
{% endfor %} {% endfor %}
@ -1585,7 +1587,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
<InstanceProfileName>{{ profile.name }}</InstanceProfileName> <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path> <Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn> <Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate> <CreateDate>{{ profile.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</InstanceProfiles> </InstanceProfiles>
@ -1651,6 +1653,7 @@ LIST_GROUPS_FOR_USER_TEMPLATE = """<ListGroupsForUserResponse>
<GroupName>{{ group.name }}</GroupName> <GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId> <GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn> <Arn>{{ group.arn }}</Arn>
<CreateDate>{{ group.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</Groups> </Groups>
@ -1704,7 +1707,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<GroupName>{{ group.name }}</GroupName> <GroupName>{{ group.name }}</GroupName>
<Path>{{ group.path }}</Path> <Path>{{ group.path }}</Path>
<Arn>{{ group.arn }}</Arn> <Arn>{{ group.arn }}</Arn>
<CreateDate>{{ group.create_date }}</CreateDate> <CreateDate>{{ group.created_iso_8601 }}</CreateDate>
<GroupPolicyList> <GroupPolicyList>
{% for policy in group.policies %} {% for policy in group.policies %}
<member> <member>
@ -1754,7 +1757,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
</member> </member>
{% endfor %} {% endfor %}
@ -1762,7 +1765,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<InstanceProfileName>{{ profile.name }}</InstanceProfileName> <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path> <Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn> <Arn>{{ profile.arn }}</Arn>
<CreateDate>{{ profile.create_date }}</CreateDate> <CreateDate>{{ profile.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</InstanceProfileList> </InstanceProfileList>
@ -1770,7 +1773,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
<Arn>{{ role.arn }}</Arn> <Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName> <RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument> <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>{{ role.create_date }}</CreateDate> <CreateDate>{{ role.created_iso_8601 }}</CreateDate>
<RoleId>{{ role.id }}</RoleId> <RoleId>{{ role.id }}</RoleId>
</member> </member>
{% endfor %} {% endfor %}
@ -1786,17 +1789,17 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
{% for policy_version in policy.versions %} {% for policy_version in policy.versions %}
<member> <member>
<Document>{{ policy_version.document }}</Document> <Document>{{ policy_version.document }}</Document>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion> <IsDefaultVersion>{{ policy_version.is_default | lower }}</IsDefaultVersion>
<VersionId>{{ policy_version.version_id }}</VersionId> <VersionId>{{ policy_version.version_id }}</VersionId>
<CreateDate>{{ policy_version.create_datetime }}</CreateDate> <CreateDate>{{ policy_version.created_iso_8601 }}</CreateDate>
</member> </member>
{% endfor %} {% endfor %}
</PolicyVersionList> </PolicyVersionList>
<Arn>{{ policy.arn }}</Arn> <Arn>{{ policy.arn }}</Arn>
<AttachmentCount>1</AttachmentCount> <AttachmentCount>1</AttachmentCount>
<CreateDate>{{ policy.create_datetime }}</CreateDate> <CreateDate>{{ policy.created_iso_8601 }}</CreateDate>
<IsAttachable>true</IsAttachable> <IsAttachable>true</IsAttachable>
<UpdateDate>{{ policy.update_datetime }}</UpdateDate> <UpdateDate>{{ policy.updated_iso_8601 }}</UpdateDate>
</member> </member>
{% endfor %} {% endfor %}
</Policies> </Policies>

View File

@ -7,7 +7,7 @@ import six
def random_alphanumeric(length): def random_alphanumeric(length):
return ''.join(six.text_type( return ''.join(six.text_type(
random.choice( random.choice(
string.ascii_letters + string.digits string.ascii_letters + string.digits + "+" + "/"
)) for _ in range(length) )) for _ in range(length)
) )
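The change above widens the alphabet with '+' and '/', and the IAM model now asks for 40 characters and prefixes access key ids with "AKIA", so generated credentials look much closer to real AWS ones. A standalone sketch of the same idea (the key-id tail is a made-up placeholder, not moto's actual random_access_key):

import random
import string

def random_alphanumeric(length):
    # same alphabet as above: ASCII letters, digits, '+' and '/'
    return ''.join(random.choice(string.ascii_letters + string.digits + "+/") for _ in range(length))

# roughly the credential shape the IAM backend now produces (purely random, not real keys)
access_key_id = "AKIA" + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(16))
secret_access_key = random_alphanumeric(40)
print(access_key_id)
print(secret_access_key)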

View File

@ -123,17 +123,12 @@ class Stream(BaseModel):
self.tags = {} self.tags = {}
self.status = "ACTIVE" self.status = "ACTIVE"
if six.PY3: step = 2**128 // shard_count
izip_longest = itertools.zip_longest hash_ranges = itertools.chain(map(lambda i: (i, i * step, (i + 1) * step),
else: range(shard_count - 1)),
izip_longest = itertools.izip_longest [(shard_count - 1, (shard_count - 1) * step, 2**128)])
for index, start, end in hash_ranges:
for index, start, end in izip_longest(range(shard_count),
range(0, 2**128, 2 **
128 // shard_count),
range(2**128 // shard_count, 2 **
128, 2**128 // shard_count),
fillvalue=2**128):
shard = Shard(index, start, end) shard = Shard(index, start, end)
self.shards[shard.shard_id] = shard self.shards[shard.shard_id] = shard
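The rewritten hunk above replaces the izip_longest construction with an explicit step: each shard gets an equal slice of the 128-bit hash key space, and the last shard is pinned to end exactly at 2**128 so integer-division rounding never loses keys. A small sketch of the same computation:

import itertools

def shard_hash_ranges(shard_count):
    step = 2 ** 128 // shard_count
    # all shards but the last cover [i*step, (i+1)*step); the last one runs to 2**128 exactly
    return itertools.chain(
        ((i, i * step, (i + 1) * step) for i in range(shard_count - 1)),
        [(shard_count - 1, (shard_count - 1) * step, 2 ** 128)],
    )

for index, start, end in shard_hash_ranges(3):
    print(index, start, end)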

View File

@ -268,10 +268,26 @@ class fakesock(object):
_sent_data = [] _sent_data = []
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM,
protocol=0): proto=0, fileno=None, _sock=None):
self.truesock = (old_socket(family, type, protocol) """
if httpretty.allow_net_connect Matches both the Python 2 API:
else None) def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
https://github.com/python/cpython/blob/2.7/Lib/socket.py
and the Python 3 API:
def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
https://github.com/python/cpython/blob/3.5/Lib/socket.py
"""
if httpretty.allow_net_connect:
if PY3:
self.truesock = old_socket(family, type, proto, fileno)
else:
# In Python 2, if parameters are passed positionally instead of as kwargs,
# the 4th argument `_sock` is interpreted as the `fileno`.
# Check if _sock is none, and if so, pass fileno.
self.truesock = old_socket(family, type, proto, fileno or _sock)
else:
self.truesock = None
self._closed = True self._closed = True
self.fd = FakeSockFile() self.fd = FakeSockFile()
self.fd.socket = self self.fd.socket = self
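The constructor above has to accept both calling conventions, falling back to a positionally passed `_sock` when the Python 3 style `fileno` is absent. A minimal stand-alone illustration of that fallback, using plain sockets rather than the fakesock wrapper:

import socket

def make_socket(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, fileno=None, _sock=None):
    # prefer the Python 3 style `fileno`; fall back to a positionally passed `_sock`
    return socket.socket(family, type, proto, fileno if fileno is not None else _sock)

s = make_socket()
print(s)
s.close()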

View File

@ -120,7 +120,7 @@ class RecordSet(BaseModel):
properties["HostedZoneId"]) properties["HostedZoneId"])
try: try:
hosted_zone.delete_rrset_by_name(resource_name) hosted_zone.delete_rrset({'Name': resource_name})
except KeyError: except KeyError:
pass pass
@ -171,7 +171,13 @@ class RecordSet(BaseModel):
self.hosted_zone_name) self.hosted_zone_name)
if not hosted_zone: if not hosted_zone:
hosted_zone = route53_backend.get_hosted_zone(self.hosted_zone_id) hosted_zone = route53_backend.get_hosted_zone(self.hosted_zone_id)
hosted_zone.delete_rrset_by_name(self.name) hosted_zone.delete_rrset({'Name': self.name, 'Type': self.type_})
def reverse_domain_name(domain_name):
if domain_name.endswith('.'): # normalize without trailing dot
domain_name = domain_name[:-1]
return '.'.join(reversed(domain_name.split('.')))
class FakeZone(BaseModel): class FakeZone(BaseModel):
@ -199,9 +205,13 @@ class FakeZone(BaseModel):
self.rrsets.append(new_rrset) self.rrsets.append(new_rrset)
return new_rrset return new_rrset
def delete_rrset_by_name(self, name): def delete_rrset(self, rrset):
self.rrsets = [ self.rrsets = [
record_set for record_set in self.rrsets if record_set.name != name] record_set
for record_set in self.rrsets
if record_set.name != rrset['Name'] or
(rrset.get('Type') is not None and record_set.type_ != rrset['Type'])
]
def delete_rrset_by_id(self, set_identifier): def delete_rrset_by_id(self, set_identifier):
self.rrsets = [ self.rrsets = [
@ -209,12 +219,15 @@ class FakeZone(BaseModel):
def get_record_sets(self, start_type, start_name): def get_record_sets(self, start_type, start_name):
record_sets = list(self.rrsets) # Copy the list record_sets = list(self.rrsets) # Copy the list
if start_name:
record_sets = [
record_set
for record_set in record_sets
if reverse_domain_name(record_set.name) >= reverse_domain_name(start_name)
]
if start_type: if start_type:
record_sets = [ record_sets = [
record_set for record_set in record_sets if record_set.type_ >= start_type] record_set for record_set in record_sets if record_set.type_ >= start_type]
if start_name:
record_sets = [
record_set for record_set in record_sets if record_set.name >= start_name]
return record_sets return record_sets
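reverse_domain_name exists so that the StartRecordName filter compares names hierarchically (TLD first) rather than by raw string order, which matches how Route53 is documented to list record sets. A quick illustration of the ordering it produces:

def reverse_domain_name(domain_name):
    if domain_name.endswith('.'):  # normalize without trailing dot
        domain_name = domain_name[:-1]
    return '.'.join(reversed(domain_name.split('.')))

names = ['b.example.com.', 'a.example.com.', 'example.com.', 'a.example.org.']
print(sorted(names, key=reverse_domain_name))
# ['example.com.', 'a.example.com.', 'b.example.com.', 'a.example.org.']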

View File

@ -144,7 +144,7 @@ class Route53(BaseResponse):
the_zone.delete_rrset_by_id( the_zone.delete_rrset_by_id(
record_set["SetIdentifier"]) record_set["SetIdentifier"])
else: else:
the_zone.delete_rrset_by_name(record_set["Name"]) the_zone.delete_rrset(record_set)
return 200, headers, CHANGE_RRSET_RESPONSE return 200, headers, CHANGE_RRSET_RESPONSE

View File

@ -12,7 +12,7 @@ from boto3 import Session
from moto.compat import OrderedDict from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds from moto.core.utils import iso_8601_datetime_with_milliseconds, camelcase_to_underscores
from moto.sqs import sqs_backends from moto.sqs import sqs_backends
from moto.awslambda import lambda_backends from moto.awslambda import lambda_backends
@ -243,11 +243,14 @@ class SNSBackend(BaseBackend):
def update_sms_attributes(self, attrs): def update_sms_attributes(self, attrs):
self.sms_attributes.update(attrs) self.sms_attributes.update(attrs)
def create_topic(self, name): def create_topic(self, name, attributes=None):
fails_constraints = not re.match(r'^[a-zA-Z0-9_-]{1,256}$', name) fails_constraints = not re.match(r'^[a-zA-Z0-9_-]{1,256}$', name)
if fails_constraints: if fails_constraints:
raise InvalidParameterValue("Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long.") raise InvalidParameterValue("Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long.")
candidate_topic = Topic(name, self) candidate_topic = Topic(name, self)
if attributes:
for attribute in attributes:
setattr(candidate_topic, camelcase_to_underscores(attribute), attributes[attribute])
if candidate_topic.arn in self.topics: if candidate_topic.arn in self.topics:
return self.topics[candidate_topic.arn] return self.topics[candidate_topic.arn]
else: else:
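create_topic now accepts an Attributes map and sets each entry on the topic after converting the key from CamelCase to snake_case via camelcase_to_underscores. A rough stand-in for that conversion (the real helper lives in moto.core.utils and may differ in edge cases):

import re

def camelcase_to_underscores(name):
    # assumed behavior of the helper: "DisplayName" -> "display_name"
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

attributes = {"DisplayName": "Alerts", "KmsMasterKeyId": "alias/aws/sns"}
print({camelcase_to_underscores(key): value for key, value in attributes.items()})
# {'display_name': 'Alerts', 'kms_master_key_id': 'alias/aws/sns'}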

View File

@ -75,7 +75,8 @@ class SNSResponse(BaseResponse):
def create_topic(self): def create_topic(self):
name = self._get_param('Name') name = self._get_param('Name')
topic = self.backend.create_topic(name) attributes = self._get_attributes()
topic = self.backend.create_topic(name, attributes)
if self.request_json: if self.request_json:
return json.dumps({ return json.dumps({

View File

@ -0,0 +1,25 @@
package com.amazonaws.examples
import com.amazonaws.client.builder.AwsClientBuilder
import com.amazonaws.regions.{Region, Regions}
import com.amazonaws.services.sqs.AmazonSQSClientBuilder
import scala.jdk.CollectionConverters._
object QueueTest extends App {
val region = Region.getRegion(Regions.US_WEST_2).getName
val serviceEndpoint = "http://localhost:5000"
val amazonSqs = AmazonSQSClientBuilder.standard()
.withEndpointConfiguration(
new AwsClientBuilder.EndpointConfiguration(serviceEndpoint, region))
.build
val queueName = "my-first-queue"
amazonSqs.createQueue(queueName)
val urls = amazonSqs.listQueues().getQueueUrls.asScala
println("Listing queues")
println(urls.map(url => s" - $url").mkString(System.lineSeparator))
println()
}
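For readers who do not use Scala, a hypothetical boto3 equivalent of the snippet above, assuming a moto server is listening on the same http://localhost:5000 endpoint; the credentials are throwaway placeholders:

import boto3

sqs = boto3.client(
    "sqs",
    region_name="us-west-2",
    endpoint_url="http://localhost:5000",
    aws_access_key_id="testing",       # placeholder credentials; the mock does not verify them
    aws_secret_access_key="testing",
)
sqs.create_queue(QueueName="my-first-queue")
print("Listing queues")
for url in sqs.list_queues().get("QueueUrls", []):
    print(" - " + url)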

View File

@ -48,7 +48,8 @@ for policy_name in policies:
PolicyArn=policies[policy_name]['Arn'], PolicyArn=policies[policy_name]['Arn'],
VersionId=policies[policy_name]['DefaultVersionId']) VersionId=policies[policy_name]['DefaultVersionId'])
for key in response['PolicyVersion']: for key in response['PolicyVersion']:
policies[policy_name][key] = response['PolicyVersion'][key] if key != "CreateDate": # the policy's CreateDate should not be overwritten by its version's CreateDate
policies[policy_name][key] = response['PolicyVersion'][key]
with open(output_file, 'w') as f: with open(output_file, 'w') as f:
triple_quote = '\"\"\"' triple_quote = '\"\"\"'

View File

@ -18,17 +18,27 @@ def read(*parts):
return fp.read() return fp.read()
def get_version():
version_file = read('moto', '__init__.py')
version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
version_file, re.MULTILINE)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
install_requires = [ install_requires = [
"Jinja2>=2.10.1", "Jinja2>=2.10.1",
"boto>=2.36.0", "boto>=2.36.0",
"boto3>=1.9.86", "boto3>=1.9.86",
"botocore>=1.12.86", "botocore>=1.12.86",
"cryptography>=2.3.0", "cryptography>=2.3.0",
"datetime",
"requests>=2.5", "requests>=2.5",
"xmltodict", "xmltodict",
"six>1.9", "six>1.9",
"werkzeug", "werkzeug",
"PyYAML==3.13", "PyYAML>=5.1",
"pytz", "pytz",
"python-dateutil<3.0.0,>=2.1", "python-dateutil<3.0.0,>=2.1",
"python-jose<4.0.0", "python-jose<4.0.0",
@ -56,7 +66,7 @@ else:
setup( setup(
name='moto', name='moto',
version='1.3.8', version=get_version(),
description='A library that allows your python tests to easily' description='A library that allows your python tests to easily'
' mock out the boto library', ' mock out the boto library',
long_description=read('README.md'), long_description=read('README.md'),
@ -79,10 +89,10 @@ setup(
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7", "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing", "Topic :: Software Development :: Testing",
], ],
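get_version() above reads the version string out of moto/__init__.py instead of hard-coding it in setup.py. The same regex applied to a made-up sample string:

import re

sample = "from .core import mock_ec2  # noqa\n__version__ = '1.3.9'\n"  # hypothetical file contents
match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', sample, re.MULTILINE)
print(match.group(1))  # -> 1.3.9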

View File

@ -0,0 +1,48 @@
import unittest
from moto import mock_dynamodb2_deprecated, mock_dynamodb2
import socket
from six import PY3
class TestSocketPair(unittest.TestCase):
@mock_dynamodb2_deprecated
def test_asyncio_deprecated(self):
if PY3:
self.assertIn(
'moto.packages.httpretty.core.fakesock.socket',
str(socket.socket),
'Our mock should be present'
)
import asyncio
self.assertIsNotNone(asyncio.get_event_loop())
@mock_dynamodb2_deprecated
def test_socket_pair_deprecated(self):
# In Python 2, the fake socket is not set, for reasons that are unclear.
if PY3:
self.assertIn(
'moto.packages.httpretty.core.fakesock.socket',
str(socket.socket),
'Our mock should be present'
)
a, b = socket.socketpair()
self.assertIsNotNone(a)
self.assertIsNotNone(b)
if a:
a.close()
if b:
b.close()
@mock_dynamodb2
def test_socket_pair(self):
a, b = socket.socketpair()
self.assertIsNotNone(a)
self.assertIsNotNone(b)
if a:
a.close()
if b:
b.close()

View File

@ -452,6 +452,90 @@ def test_basic_projection_expressions():
assert 'body' in results['Items'][1] assert 'body' in results['Items'][1]
assert 'forum_name' in results['Items'][1] assert 'forum_name' in results['Items'][1]
@mock_dynamodb2
def test_basic_projection_expressions_using_scan():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
# Create the DynamoDB table.
table = dynamodb.create_table(
TableName='users',
KeySchema=[
{
'AttributeName': 'forum_name',
'KeyType': 'HASH'
},
{
'AttributeName': 'subject',
'KeyType': 'RANGE'
},
],
AttributeDefinitions=[
{
'AttributeName': 'forum_name',
'AttributeType': 'S'
},
{
'AttributeName': 'subject',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 5,
'WriteCapacityUnits': 5
}
)
table = dynamodb.Table('users')
table.put_item(Item={
'forum_name': 'the-key',
'subject': '123',
'body': 'some test message'
})
table.put_item(Item={
'forum_name': 'not-the-key',
'subject': '123',
'body': 'some other test message'
})
# Test a scan returning all items
results = table.scan(
FilterExpression=Key('forum_name').eq(
'the-key'),
ProjectionExpression='body, subject'
)
assert 'body' in results['Items'][0]
assert results['Items'][0]['body'] == 'some test message'
assert 'subject' in results['Items'][0]
table.put_item(Item={
'forum_name': 'the-key',
'subject': '1234',
'body': 'yet another test message'
})
results = table.scan(
FilterExpression=Key('forum_name').eq(
'the-key'),
ProjectionExpression='body'
)
assert 'body' in results['Items'][0]
assert 'subject' not in results['Items'][0]
assert 'forum_name' not in results['Items'][0]
assert 'body' in results['Items'][1]
assert 'subject' not in results['Items'][1]
assert 'forum_name' not in results['Items'][1]
# The projection expression should not remove data from storage
results = table.query(
KeyConditionExpression=Key('forum_name').eq(
'the-key'),
)
assert 'subject' in results['Items'][0]
assert 'body' in results['Items'][1]
assert 'forum_name' in results['Items'][1]
@mock_dynamodb2 @mock_dynamodb2
def test_basic_projection_expressions_with_attr_expression_names(): def test_basic_projection_expressions_with_attr_expression_names():
@ -519,6 +603,84 @@ def test_basic_projection_expressions_with_attr_expression_names():
assert 'attachment' in results['Items'][0] assert 'attachment' in results['Items'][0]
assert results['Items'][0]['attachment'] == 'something' assert results['Items'][0]['attachment'] == 'something'
@mock_dynamodb2
def test_basic_projection_expressions_using_scan_with_attr_expression_names():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
# Create the DynamoDB table.
table = dynamodb.create_table(
TableName='users',
KeySchema=[
{
'AttributeName': 'forum_name',
'KeyType': 'HASH'
},
{
'AttributeName': 'subject',
'KeyType': 'RANGE'
},
],
AttributeDefinitions=[
{
'AttributeName': 'forum_name',
'AttributeType': 'S'
},
{
'AttributeName': 'subject',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 5,
'WriteCapacityUnits': 5
}
)
table = dynamodb.Table('users')
table.put_item(Item={
'forum_name': 'the-key',
'subject': '123',
'body': 'some test message',
'attachment': 'something'
})
table.put_item(Item={
'forum_name': 'not-the-key',
'subject': '123',
'body': 'some other test message',
'attachment': 'something'
})
# Test a scan returning all items
results = table.scan(
FilterExpression=Key('forum_name').eq(
'the-key'),
ProjectionExpression='#rl, #rt, subject',
ExpressionAttributeNames={
'#rl': 'body',
'#rt': 'attachment'
},
)
assert 'body' in results['Items'][0]
assert 'attachment' in results['Items'][0]
assert 'subject' in results['Items'][0]
assert 'forum_name' not in results['Items'][0]
# Test without a FilterExpression
results = table.scan(
ProjectionExpression='#rl, #rt, subject',
ExpressionAttributeNames={
'#rl': 'body',
'#rt': 'attachment'
},
)
assert 'body' in results['Items'][0]
assert 'attachment' in results['Items'][0]
assert 'subject' in results['Items'][0]
assert 'forum_name' not in results['Items'][0]
@mock_dynamodb2 @mock_dynamodb2
def test_put_item_returns_consumed_capacity(): def test_put_item_returns_consumed_capacity():

View File

@ -1,5 +1,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
# Ensure 'assert_raises' context manager support for Python 2.6 # Ensure 'assert_raises' context manager support for Python 2.6
from botocore.exceptions import ClientError
import tests.backport_assert_raises import tests.backport_assert_raises
from nose.tools import assert_raises from nose.tools import assert_raises
@ -679,8 +681,8 @@ def test_modify_instance_attribute_security_groups():
reservation = conn.run_instances('ami-1234abcd') reservation = conn.run_instances('ami-1234abcd')
instance = reservation.instances[0] instance = reservation.instances[0]
sg_id = 'sg-1234abcd' sg_id = conn.create_security_group('test security group', 'this is a test security group').id
sg_id2 = 'sg-abcd4321' sg_id2 = conn.create_security_group('test security group 2', 'this is a test security group 2').id
with assert_raises(EC2ResponseError) as ex: with assert_raises(EC2ResponseError) as ex:
instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True) instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True)
@ -1255,6 +1257,7 @@ def test_create_instance_ebs_optimized():
instance.load() instance.load()
instance.ebs_optimized.should.be(False) instance.ebs_optimized.should.be(False)
@mock_ec2 @mock_ec2
def test_run_multiple_instances_in_same_command(): def test_run_multiple_instances_in_same_command():
instance_count = 4 instance_count = 4
@ -1269,3 +1272,37 @@ def test_run_multiple_instances_in_same_command():
instances = reservations[0]['Instances'] instances = reservations[0]['Instances']
for i in range(0, instance_count): for i in range(0, instance_count):
instances[i]['AmiLaunchIndex'].should.be(i) instances[i]['AmiLaunchIndex'].should.be(i)
@mock_ec2
def test_describe_instance_attribute():
client = boto3.client('ec2', region_name='us-east-1')
security_group_id = client.create_security_group(
GroupName='test security group', Description='this is a test security group')['GroupId']
client.run_instances(ImageId='ami-1234abcd',
MinCount=1,
MaxCount=1,
SecurityGroupIds=[security_group_id])
instance_id = client.describe_instances()['Reservations'][0]['Instances'][0]['InstanceId']
valid_instance_attributes = ['instanceType', 'kernel', 'ramdisk', 'userData', 'disableApiTermination', 'instanceInitiatedShutdownBehavior', 'rootDeviceName', 'blockDeviceMapping', 'productCodes', 'sourceDestCheck', 'groupSet', 'ebsOptimized', 'sriovNetSupport']
for valid_instance_attribute in valid_instance_attributes:
response = client.describe_instance_attribute(InstanceId=instance_id, Attribute=valid_instance_attribute)
if valid_instance_attribute == "groupSet":
response.should.have.key("Groups")
response["Groups"].should.have.length_of(1)
response["Groups"][0]["GroupId"].should.equal(security_group_id)
elif valid_instance_attribute == "userData":
response.should.have.key("UserData")
response["UserData"].should.be.empty
invalid_instance_attributes = ['abc', 'Kernel', 'RamDisk', 'userdata', 'iNsTaNcEtYpE']
for invalid_instance_attribute in invalid_instance_attributes:
with assert_raises(ClientError) as ex:
client.describe_instance_attribute(InstanceId=instance_id, Attribute=invalid_instance_attribute)
ex.exception.response['Error']['Code'].should.equal('InvalidParameterValue')
ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
message = 'Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute.'.format(invalid_instance_attribute=invalid_instance_attribute)
ex.exception.response['Error']['Message'].should.equal(message)

View File

@ -3,6 +3,8 @@ from __future__ import unicode_literals
import hashlib import hashlib
import json import json
from datetime import datetime from datetime import datetime
from freezegun import freeze_time
import os
from random import random from random import random
import re import re
@ -13,6 +15,7 @@ from botocore.exceptions import ClientError, ParamValidationError
from dateutil.tz import tzlocal from dateutil.tz import tzlocal
from moto import mock_ecr from moto import mock_ecr
from nose import SkipTest
def _create_image_digest(contents=None): def _create_image_digest(contents=None):
@ -198,6 +201,42 @@ def test_put_image():
response['image']['repositoryName'].should.equal('test_repository') response['image']['repositoryName'].should.equal('test_repository')
response['image']['registryId'].should.equal('012345678910') response['image']['registryId'].should.equal('012345678910')
@mock_ecr
def test_put_image_with_push_date():
if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'true':
raise SkipTest("Can't manipulate time in server mode")
client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository(
repositoryName='test_repository'
)
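# Freeze time so each put_image call gets a distinct, predictable imagePushedAt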
with freeze_time('2018-08-28 00:00:00'):
image1_date = datetime.now()
_ = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest()),
imageTag='latest'
)
with freeze_time('2019-05-31 00:00:00'):
image2_date = datetime.now()
_ = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest()),
imageTag='latest'
)
describe_response = client.describe_images(repositoryName='test_repository')
type(describe_response['imageDetails']).should.be(list)
len(describe_response['imageDetails']).should.be(2)
set([describe_response['imageDetails'][0]['imagePushedAt'],
describe_response['imageDetails'][1]['imagePushedAt']]).should.equal(set([image1_date, image2_date]))
@mock_ecr @mock_ecr
def test_put_image_with_multiple_tags(): def test_put_image_with_multiple_tags():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
@ -240,6 +279,7 @@ def test_put_image_with_multiple_tags():
len(response2['imageDetails'][0]['imageTags']).should.be(2) len(response2['imageDetails'][0]['imageTags']).should.be(2)
response2['imageDetails'][0]['imageTags'].should.be.equal(['v1', 'latest']) response2['imageDetails'][0]['imageTags'].should.be.equal(['v1', 'latest'])
@mock_ecr @mock_ecr
def test_list_images(): def test_list_images():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
@ -700,7 +740,7 @@ def test_batch_get_image_no_tags():
@mock_ecr @mock_ecr
def test_batch_delete_image_by_tag(): def test_batch_delete_image_by_tag():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -708,14 +748,13 @@ def test_batch_delete_image_by_tag():
tags = ['v1', 'v1.0', 'latest'] tags = ['v1', 'v1.0', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag, imageTag=tag,
) )
describe_response1 = client.describe_images(repositoryName='test_repository') describe_response1 = client.describe_images(repositoryName='test_repository')
image_digest = describe_response1['imageDetails'][0]['imageDigest']
batch_delete_response = client.batch_delete_image( batch_delete_response = client.batch_delete_image(
registryId='012345678910', registryId='012345678910',
@ -744,10 +783,52 @@ def test_batch_delete_image_by_tag():
len(batch_delete_response['failures']).should.be(0) len(batch_delete_response['failures']).should.be(0)
@mock_ecr
def test_batch_delete_image_delete_last_tag():
client = boto3.client('ecr', region_name='us-east-1')
client.create_repository(
repositoryName='test_repository'
)
client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest()),
imageTag='v1',
)
describe_response1 = client.describe_images(repositoryName='test_repository')
batch_delete_response = client.batch_delete_image(
registryId='012345678910',
repositoryName='test_repository',
imageIds=[
{
'imageTag': 'v1'
},
],
)
describe_response2 = client.describe_images(repositoryName='test_repository')
type(describe_response1['imageDetails'][0]['imageTags']).should.be(list)
len(describe_response1['imageDetails'][0]['imageTags']).should.be(1)
type(describe_response2['imageDetails']).should.be(list)
len(describe_response2['imageDetails']).should.be(0)
type(batch_delete_response['imageIds']).should.be(list)
len(batch_delete_response['imageIds']).should.be(1)
batch_delete_response['imageIds'][0]['imageTag'].should.equal("v1")
type(batch_delete_response['failures']).should.be(list)
len(batch_delete_response['failures']).should.be(0)
@mock_ecr @mock_ecr
def test_batch_delete_image_with_nonexistent_tag(): def test_batch_delete_image_with_nonexistent_tag():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -755,14 +836,13 @@ def test_batch_delete_image_with_nonexistent_tag():
tags = ['v1', 'v1.0', 'latest'] tags = ['v1', 'v1.0', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag, imageTag=tag,
) )
describe_response = client.describe_images(repositoryName='test_repository') describe_response = client.describe_images(repositoryName='test_repository')
image_digest = describe_response['imageDetails'][0]['imageDigest']
missing_tag = "missing-tag" missing_tag = "missing-tag"
batch_delete_response = client.batch_delete_image( batch_delete_response = client.batch_delete_image(
@ -792,7 +872,7 @@ def test_batch_delete_image_with_nonexistent_tag():
@mock_ecr @mock_ecr
def test_batch_delete_image_by_digest(): def test_batch_delete_image_by_digest():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -800,7 +880,7 @@ def test_batch_delete_image_by_digest():
tags = ['v1', 'v2', 'latest'] tags = ['v1', 'v2', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag imageTag=tag
@ -843,7 +923,7 @@ def test_batch_delete_image_by_digest():
@mock_ecr @mock_ecr
def test_batch_delete_image_with_invalid_digest(): def test_batch_delete_image_with_invalid_digest():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -851,13 +931,12 @@ def test_batch_delete_image_with_invalid_digest():
tags = ['v1', 'v2', 'latest'] tags = ['v1', 'v2', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag imageTag=tag
) )
describe_response = client.describe_images(repositoryName='test_repository')
invalid_image_digest = 'sha256:invalid-digest' invalid_image_digest = 'sha256:invalid-digest'
batch_delete_response = client.batch_delete_image( batch_delete_response = client.batch_delete_image(
@ -884,7 +963,7 @@ def test_batch_delete_image_with_invalid_digest():
@mock_ecr @mock_ecr
def test_batch_delete_image_with_missing_parameters(): def test_batch_delete_image_with_missing_parameters():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -910,7 +989,7 @@ def test_batch_delete_image_with_missing_parameters():
@mock_ecr @mock_ecr
def test_batch_delete_image_with_matching_digest_and_tag(): def test_batch_delete_image_with_matching_digest_and_tag():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -918,7 +997,7 @@ def test_batch_delete_image_with_matching_digest_and_tag():
tags = ['v1', 'v1.0', 'latest'] tags = ['v1', 'v1.0', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag imageTag=tag
@ -962,7 +1041,7 @@ def test_batch_delete_image_with_matching_digest_and_tag():
@mock_ecr @mock_ecr
def test_batch_delete_image_with_mismatched_digest_and_tag(): def test_batch_delete_image_with_mismatched_digest_and_tag():
client = boto3.client('ecr', region_name='us-east-1') client = boto3.client('ecr', region_name='us-east-1')
_ = client.create_repository( client.create_repository(
repositoryName='test_repository' repositoryName='test_repository'
) )
@ -970,7 +1049,7 @@ def test_batch_delete_image_with_mismatched_digest_and_tag():
tags = ['v1', 'latest'] tags = ['v1', 'latest']
for tag in tags: for tag in tags:
put_response = client.put_image( client.put_image(
repositoryName='test_repository', repositoryName='test_repository',
imageManifest=json.dumps(manifest), imageManifest=json.dumps(manifest),
imageTag=tag imageTag=tag

View File

@ -229,6 +229,26 @@ def test_delete_table():
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException') exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
exc.exception.response['Error']['Message'].should.match('Table myspecialtable not found') exc.exception.response['Error']['Message'].should.match('Table myspecialtable not found')
@mock_glue
def test_batch_delete_table():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
helpers.create_database(client, database_name)
table_name = 'myspecialtable'
table_input = helpers.create_table_input(database_name, table_name)
helpers.create_table(client, database_name, table_name, table_input)
result = client.batch_delete_table(DatabaseName=database_name, TablesToDelete=[table_name])
result['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
# confirm table is deleted
with assert_raises(ClientError) as exc:
helpers.get_table(client, database_name, table_name)
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
exc.exception.response['Error']['Message'].should.match('Table myspecialtable not found')
@mock_glue @mock_glue
def test_get_partitions_empty(): def test_get_partitions_empty():
@ -310,6 +330,72 @@ def test_get_partition_not_found():
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException') exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
exc.exception.response['Error']['Message'].should.match('partition') exc.exception.response['Error']['Message'].should.match('partition')
@mock_glue
def test_batch_create_partition():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
before = datetime.now(pytz.utc)
partition_inputs = []
for i in range(0, 20):
values = ["2018-10-{:2}".format(i)]
part_input = helpers.create_partition_input(database_name, table_name, values=values)
partition_inputs.append(part_input)
client.batch_create_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionInputList=partition_inputs
)
after = datetime.now(pytz.utc)
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
partitions = response['Partitions']
partitions.should.have.length_of(20)
for idx, partition in enumerate(partitions):
partition_input = partition_inputs[idx]
partition['TableName'].should.equal(table_name)
partition['StorageDescriptor'].should.equal(partition_input['StorageDescriptor'])
partition['Values'].should.equal(partition_input['Values'])
partition['CreationTime'].should.be.greater_than(before)
partition['CreationTime'].should.be.lower_than(after)
@mock_glue
def test_batch_create_partition_already_exist():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
values = ['2018-10-01']
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
helpers.create_partition(client, database_name, table_name, values=values)
partition_input = helpers.create_partition_input(database_name, table_name, values=values)
response = client.batch_create_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionInputList=[partition_input]
)
response.should.have.key('Errors')
response['Errors'].should.have.length_of(1)
response['Errors'][0]['PartitionValues'].should.equal(values)
response['Errors'][0]['ErrorDetail']['ErrorCode'].should.equal('AlreadyExistsException')
@mock_glue @mock_glue
def test_get_partition(): def test_get_partition():
@ -445,3 +531,112 @@ def test_update_partition_move():
partition['TableName'].should.equal(table_name) partition['TableName'].should.equal(table_name)
partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}]) partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
@mock_glue
def test_delete_partition():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
values = ['2018-10-01']
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
part_input = helpers.create_partition_input(database_name, table_name, values=values)
helpers.create_partition(client, database_name, table_name, part_input)
client.delete_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionValues=values,
)
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
partitions = response['Partitions']
partitions.should.be.empty
@mock_glue
def test_delete_partition_bad_partition():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
values = ['2018-10-01']
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
with assert_raises(ClientError) as exc:
client.delete_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionValues=values,
)
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
@mock_glue
def test_batch_delete_partition():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
partition_inputs = []
for i in range(0, 20):
values = ["2018-10-{:2}".format(i)]
part_input = helpers.create_partition_input(database_name, table_name, values=values)
partition_inputs.append(part_input)
client.batch_create_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionInputList=partition_inputs
)
partition_values = [{"Values": p["Values"]} for p in partition_inputs]
response = client.batch_delete_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionsToDelete=partition_values,
)
response.should_not.have.key('Errors')
@mock_glue
def test_batch_delete_partition_with_bad_partitions():
client = boto3.client('glue', region_name='us-east-1')
database_name = 'myspecialdatabase'
table_name = 'myfirsttable'
helpers.create_database(client, database_name)
helpers.create_table(client, database_name, table_name)
partition_inputs = []
for i in range(0, 20):
values = ["2018-10-{:2}".format(i)]
part_input = helpers.create_partition_input(database_name, table_name, values=values)
partition_inputs.append(part_input)
client.batch_create_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionInputList=partition_inputs
)
partition_values = [{"Values": p["Values"]} for p in partition_inputs]
partition_values.insert(5, {"Values": ["2018-11-01"]})
partition_values.insert(10, {"Values": ["2018-11-02"]})
partition_values.insert(15, {"Values": ["2018-11-03"]})
response = client.batch_delete_partition(
DatabaseName=database_name,
TableName=table_name,
PartitionsToDelete=partition_values,
)
response.should.have.key('Errors')
response['Errors'].should.have.length_of(3)
# materialize the mapping so it can be searched more than once under Python 3
error_partitions = list(map(lambda x: x['PartitionValues'], response['Errors']))
['2018-11-01'].should.be.within(error_partitions)
['2018-11-02'].should.be.within(error_partitions)
['2018-11-03'].should.be.within(error_partitions)

View File

@ -1,5 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import base64 import base64
import json
import boto import boto
import boto3 import boto3
@ -29,6 +30,44 @@ FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt
8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ== 8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ==
-----END CERTIFICATE-----""" -----END CERTIFICATE-----"""
MOCK_POLICY = """
{
"Version": "2012-10-17",
"Statement":
{
"Effect": "Allow",
"Action": "s3:ListBucket",
"Resource": "arn:aws:s3:::example_bucket"
}
}
"""
MOCK_POLICY_2 = """
{
"Version": "2012-10-17",
"Id": "2",
"Statement":
{
"Effect": "Allow",
"Action": "s3:ListBucket",
"Resource": "arn:aws:s3:::example_bucket"
}
}
"""
MOCK_POLICY_3 = """
{
"Version": "2012-10-17",
"Id": "3",
"Statement":
{
"Effect": "Allow",
"Action": "s3:ListBucket",
"Resource": "arn:aws:s3:::example_bucket"
}
}
"""
@mock_iam_deprecated() @mock_iam_deprecated()
def test_get_all_server_certs(): def test_get_all_server_certs():
@ -243,12 +282,12 @@ def test_list_instance_profiles_for_role():
def test_list_role_policies(): def test_list_role_policies():
conn = boto.connect_iam() conn = boto.connect_iam()
conn.create_role("my-role") conn.create_role("my-role")
conn.put_role_policy("my-role", "test policy", "my policy") conn.put_role_policy("my-role", "test policy", MOCK_POLICY)
role = conn.list_role_policies("my-role") role = conn.list_role_policies("my-role")
role.policy_names.should.have.length_of(1) role.policy_names.should.have.length_of(1)
role.policy_names[0].should.equal("test policy") role.policy_names[0].should.equal("test policy")
conn.put_role_policy("my-role", "test policy 2", "another policy") conn.put_role_policy("my-role", "test policy 2", MOCK_POLICY)
role = conn.list_role_policies("my-role") role = conn.list_role_policies("my-role")
role.policy_names.should.have.length_of(2) role.policy_names.should.have.length_of(2)
@ -266,12 +305,21 @@ def test_put_role_policy():
conn = boto.connect_iam() conn = boto.connect_iam()
conn.create_role( conn.create_role(
"my-role", assume_role_policy_document="some policy", path="my-path") "my-role", assume_role_policy_document="some policy", path="my-path")
conn.put_role_policy("my-role", "test policy", "my policy") conn.put_role_policy("my-role", "test policy", MOCK_POLICY)
policy = conn.get_role_policy( policy = conn.get_role_policy(
"my-role", "test policy")['get_role_policy_response']['get_role_policy_result']['policy_name'] "my-role", "test policy")['get_role_policy_response']['get_role_policy_result']['policy_name']
policy.should.equal("test policy") policy.should.equal("test policy")
@mock_iam
def test_get_role_policy():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_role(
RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="my-path")
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_role_policy(RoleName="my-role", PolicyName="does-not-exist")
@mock_iam_deprecated() @mock_iam_deprecated()
def test_update_assume_role_policy(): def test_update_assume_role_policy():
conn = boto.connect_iam() conn = boto.connect_iam()
@ -286,7 +334,7 @@ def test_create_policy():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
response = conn.create_policy( response = conn.create_policy(
PolicyName="TestCreatePolicy", PolicyName="TestCreatePolicy",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestCreatePolicy") response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestCreatePolicy")
@ -299,20 +347,62 @@ def test_create_policy_versions():
PolicyDocument='{"some":"policy"}') PolicyDocument='{"some":"policy"}')
conn.create_policy( conn.create_policy(
PolicyName="TestCreatePolicyVersion", PolicyName="TestCreatePolicyVersion",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
version = conn.create_policy_version( version = conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
PolicyDocument='{"some":"policy"}', PolicyDocument=MOCK_POLICY,
SetAsDefault=True) SetAsDefault=True)
version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) version.get('PolicyVersion').get('Document').should.equal(json.loads(MOCK_POLICY))
version.get('PolicyVersion').get('VersionId').should.equal("v2") version.get('PolicyVersion').get('VersionId').should.equal("v2")
version.get('PolicyVersion').get('IsDefaultVersion').should.be.ok
conn.delete_policy_version( conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
VersionId="v1") VersionId="v1")
version = conn.create_policy_version( version = conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
version.get('PolicyVersion').get('VersionId').should.equal("v3") version.get('PolicyVersion').get('VersionId').should.equal("v3")
version.get('PolicyVersion').get('IsDefaultVersion').shouldnt.be.ok
@mock_iam
def test_create_many_policy_versions():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy(
PolicyName="TestCreateManyPolicyVersions",
PolicyDocument=MOCK_POLICY)
for _ in range(0, 4):
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestCreateManyPolicyVersions",
PolicyDocument=MOCK_POLICY)
with assert_raises(ClientError):
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestCreateManyPolicyVersions",
PolicyDocument=MOCK_POLICY)
@mock_iam
def test_set_default_policy_version():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy(
PolicyName="TestSetDefaultPolicyVersion",
PolicyDocument=MOCK_POLICY)
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestSetDefaultPolicyVersion",
PolicyDocument=MOCK_POLICY_2,
SetAsDefault=True)
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestSetDefaultPolicyVersion",
PolicyDocument=MOCK_POLICY_3,
SetAsDefault=True)
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestSetDefaultPolicyVersion")
versions.get('Versions')[0].get('Document').should.equal(json.loads(MOCK_POLICY))
versions.get('Versions')[0].get('IsDefaultVersion').shouldnt.be.ok
versions.get('Versions')[1].get('Document').should.equal(json.loads(MOCK_POLICY_2))
versions.get('Versions')[1].get('IsDefaultVersion').shouldnt.be.ok
versions.get('Versions')[2].get('Document').should.equal(json.loads(MOCK_POLICY_3))
versions.get('Versions')[2].get('IsDefaultVersion').should.be.ok
@mock_iam @mock_iam
@ -320,10 +410,21 @@ def test_get_policy():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
response = conn.create_policy( response = conn.create_policy(
PolicyName="TestGetPolicy", PolicyName="TestGetPolicy",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
policy = conn.get_policy( policy = conn.get_policy(
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy") PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy")
response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy") policy['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy")
@mock_iam
def test_get_aws_managed_policy():
conn = boto3.client('iam', region_name='us-east-1')
managed_policy_arn = 'arn:aws:iam::aws:policy/IAMUserChangePassword'
managed_policy_create_date = datetime.strptime("2016-11-15T00:25:16+00:00", "%Y-%m-%dT%H:%M:%S+00:00")
policy = conn.get_policy(
PolicyArn=managed_policy_arn)
policy['Policy']['Arn'].should.equal(managed_policy_arn)
policy['Policy']['CreateDate'].replace(tzinfo=None).should.equal(managed_policy_create_date)
@mock_iam @mock_iam
@ -331,10 +432,10 @@ def test_get_policy_version():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy( conn.create_policy(
PolicyName="TestGetPolicyVersion", PolicyName="TestGetPolicyVersion",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
version = conn.create_policy_version( version = conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
PolicyDocument='{"some":"policy"}') PolicyDocument=MOCK_POLICY)
with assert_raises(ClientError): with assert_raises(ClientError):
conn.get_policy_version( conn.get_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
@ -342,7 +443,40 @@ def test_get_policy_version():
retrieved = conn.get_policy_version( retrieved = conn.get_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
VersionId=version.get('PolicyVersion').get('VersionId')) VersionId=version.get('PolicyVersion').get('VersionId'))
retrieved.get('PolicyVersion').get('Document').should.equal({'some': 'policy'}) retrieved.get('PolicyVersion').get('Document').should.equal(json.loads(MOCK_POLICY))
retrieved.get('PolicyVersion').get('IsDefaultVersion').shouldnt.be.ok
@mock_iam
def test_get_aws_managed_policy_version():
conn = boto3.client('iam', region_name='us-east-1')
managed_policy_arn = 'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
managed_policy_version_create_date = datetime.strptime("2015-04-09T15:03:43+00:00", "%Y-%m-%dT%H:%M:%S+00:00")
with assert_raises(ClientError):
conn.get_policy_version(
PolicyArn=managed_policy_arn,
VersionId='v2-does-not-exist')
retrieved = conn.get_policy_version(
PolicyArn=managed_policy_arn,
VersionId="v1")
retrieved['PolicyVersion']['CreateDate'].replace(tzinfo=None).should.equal(managed_policy_version_create_date)
retrieved['PolicyVersion']['Document'].should.be.an(dict)
@mock_iam
def test_get_aws_managed_policy_v4_version():
conn = boto3.client('iam', region_name='us-east-1')
managed_policy_arn = 'arn:aws:iam::aws:policy/job-function/SystemAdministrator'
managed_policy_version_create_date = datetime.strptime("2018-10-08T21:33:45+00:00", "%Y-%m-%dT%H:%M:%S+00:00")
with assert_raises(ClientError):
conn.get_policy_version(
PolicyArn=managed_policy_arn,
VersionId='v2-does-not-exist')
retrieved = conn.get_policy_version(
PolicyArn=managed_policy_arn,
VersionId="v4")
retrieved['PolicyVersion']['CreateDate'].replace(tzinfo=None).should.equal(managed_policy_version_create_date)
retrieved['PolicyVersion']['Document'].should.be.an(dict)
@mock_iam @mock_iam
@ -353,22 +487,24 @@ def test_list_policy_versions():
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
conn.create_policy( conn.create_policy(
PolicyName="TestListPolicyVersions", PolicyName="TestListPolicyVersions",
PolicyDocument='{"first":"policy"}') PolicyDocument=MOCK_POLICY)
versions = conn.list_policy_versions( versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
versions.get('Versions')[0].get('VersionId').should.equal('v1') versions.get('Versions')[0].get('VersionId').should.equal('v1')
versions.get('Versions')[0].get('IsDefaultVersion').should.be.ok
conn.create_policy_version( conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"second":"policy"}') PolicyDocument=MOCK_POLICY_2)
conn.create_policy_version( conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions", PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"third":"policy"}') PolicyDocument=MOCK_POLICY_3)
versions = conn.list_policy_versions( versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions") PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
print(versions.get('Versions')) versions.get('Versions')[1].get('Document').should.equal(json.loads(MOCK_POLICY_2))
versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'}) versions.get('Versions')[1].get('IsDefaultVersion').shouldnt.be.ok
versions.get('Versions')[2].get('Document').should.equal({'third': 'policy'}) versions.get('Versions')[2].get('Document').should.equal(json.loads(MOCK_POLICY_3))
versions.get('Versions')[2].get('IsDefaultVersion').shouldnt.be.ok
@mock_iam @mock_iam
@ -376,10 +512,10 @@ def test_delete_policy_version():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy( conn.create_policy(
PolicyName="TestDeletePolicyVersion", PolicyName="TestDeletePolicyVersion",
PolicyDocument='{"first":"policy"}') PolicyDocument=MOCK_POLICY)
conn.create_policy_version( conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
PolicyDocument='{"second":"policy"}') PolicyDocument=MOCK_POLICY)
with assert_raises(ClientError): with assert_raises(ClientError):
conn.delete_policy_version( conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion", PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
@ -392,6 +528,21 @@ def test_delete_policy_version():
len(versions.get('Versions')).should.equal(1) len(versions.get('Versions')).should.equal(1)
@mock_iam
def test_delete_default_policy_version():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy(
PolicyName="TestDeletePolicyVersion",
PolicyDocument=MOCK_POLICY)
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
PolicyDocument=MOCK_POLICY_2)
with assert_raises(ClientError):
conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
VersionId='v1')
@mock_iam_deprecated() @mock_iam_deprecated()
def test_create_user(): def test_create_user():
conn = boto.connect_iam() conn = boto.connect_iam()
@ -446,22 +597,20 @@ def test_list_users():
@mock_iam() @mock_iam()
def test_user_policies(): def test_user_policies():
policy_name = 'UserManagedPolicy' policy_name = 'UserManagedPolicy'
policy_document = "{'mypolicy': 'test'}"
user_name = 'my-user' user_name = 'my-user'
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
conn.create_user(UserName=user_name) conn.create_user(UserName=user_name)
conn.put_user_policy( conn.put_user_policy(
UserName=user_name, UserName=user_name,
PolicyName=policy_name, PolicyName=policy_name,
PolicyDocument=policy_document PolicyDocument=MOCK_POLICY
) )
policy_doc = conn.get_user_policy( policy_doc = conn.get_user_policy(
UserName=user_name, UserName=user_name,
PolicyName=policy_name PolicyName=policy_name
) )
test = policy_document in policy_doc['PolicyDocument'] policy_doc['PolicyDocument'].should.equal(json.loads(MOCK_POLICY))
test.should.equal(True)
policies = conn.list_user_policies(UserName=user_name) policies = conn.list_user_policies(UserName=user_name)
len(policies['PolicyNames']).should.equal(1) len(policies['PolicyNames']).should.equal(1)
@ -497,13 +646,17 @@ def test_delete_login_profile():
conn.delete_login_profile('my-user') conn.delete_login_profile('my-user')
@mock_iam_deprecated() @mock_iam()
def test_create_access_key(): def test_create_access_key():
conn = boto.connect_iam() conn = boto3.client('iam', region_name='us-east-1')
with assert_raises(BotoServerError): with assert_raises(ClientError):
conn.create_access_key('my-user') conn.create_access_key(UserName='my-user')
conn.create_user('my-user') conn.create_user(UserName='my-user')
conn.create_access_key('my-user') access_key = conn.create_access_key(UserName='my-user')["AccessKey"]
(datetime.utcnow() - access_key["CreateDate"].replace(tzinfo=None)).seconds.should.be.within(0, 10)
access_key["AccessKeyId"].should.have.length_of(20)
access_key["SecretAccessKey"].should.have.length_of(40)
assert access_key["AccessKeyId"].startswith("AKIA")
@mock_iam_deprecated() @mock_iam_deprecated()
@ -622,7 +775,7 @@ def test_managed_policy():
conn = boto.connect_iam() conn = boto.connect_iam()
conn.create_policy(policy_name='UserManagedPolicy', conn.create_policy(policy_name='UserManagedPolicy',
policy_document={'mypolicy': 'test'}, policy_document=MOCK_POLICY,
path='/mypolicy/', path='/mypolicy/',
description='my user managed policy') description='my user managed policy')
@ -723,7 +876,7 @@ def test_attach_detach_user_policy():
policy_name = 'UserAttachedPolicy' policy_name = 'UserAttachedPolicy'
policy = iam.create_policy(PolicyName=policy_name, policy = iam.create_policy(PolicyName=policy_name,
PolicyDocument='{"mypolicy": "test"}', PolicyDocument=MOCK_POLICY,
Path='/mypolicy/', Path='/mypolicy/',
Description='my user attached policy') Description='my user attached policy')
@ -779,7 +932,6 @@ def test_get_access_key_last_used():
@mock_iam @mock_iam
def test_get_account_authorization_details(): def test_get_account_authorization_details():
import json
test_policy = json.dumps({ test_policy = json.dumps({
"Version": "2012-10-17", "Version": "2012-10-17",
"Statement": [ "Statement": [
@ -1211,7 +1363,6 @@ def test_update_role():
@mock_iam() @mock_iam()
def test_list_entities_for_policy(): def test_list_entities_for_policy():
import json
test_policy = json.dumps({ test_policy = json.dumps({
"Version": "2012-10-17", "Version": "2012-10-17",
"Statement": [ "Statement": [

View File

@ -10,6 +10,18 @@ from nose.tools import assert_raises
from boto.exception import BotoServerError from boto.exception import BotoServerError
from moto import mock_iam, mock_iam_deprecated from moto import mock_iam, mock_iam_deprecated
MOCK_POLICY = """
{
"Version": "2012-10-17",
"Statement":
{
"Effect": "Allow",
"Action": "s3:ListBucket",
"Resource": "arn:aws:s3:::example_bucket"
}
}
"""
@mock_iam_deprecated() @mock_iam_deprecated()
def test_create_group(): def test_create_group():
@ -101,7 +113,7 @@ def test_get_groups_for_user():
def test_put_group_policy(): def test_put_group_policy():
conn = boto.connect_iam() conn = boto.connect_iam()
conn.create_group('my-group') conn.create_group('my-group')
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}') conn.put_group_policy('my-group', 'my-policy', MOCK_POLICY)
@mock_iam @mock_iam
@ -131,7 +143,7 @@ def test_get_group_policy():
with assert_raises(BotoServerError): with assert_raises(BotoServerError):
conn.get_group_policy('my-group', 'my-policy') conn.get_group_policy('my-group', 'my-policy')
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}') conn.put_group_policy('my-group', 'my-policy', MOCK_POLICY)
conn.get_group_policy('my-group', 'my-policy') conn.get_group_policy('my-group', 'my-policy')
@ -141,7 +153,7 @@ def test_get_all_group_policies():
conn.create_group('my-group') conn.create_group('my-group')
policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names'] policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names']
assert policies == [] assert policies == []
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}') conn.put_group_policy('my-group', 'my-policy', MOCK_POLICY)
policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names'] policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names']
assert policies == ['my-policy'] assert policies == ['my-policy']
@ -151,5 +163,5 @@ def test_list_group_policies():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
conn.create_group(GroupName='my-group') conn.create_group(GroupName='my-group')
conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.be.empty conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.be.empty
conn.put_group_policy(GroupName='my-group', PolicyName='my-policy', PolicyDocument='{"some": "json"}') conn.put_group_policy(GroupName='my-group', PolicyName='my-policy', PolicyDocument=MOCK_POLICY)
conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.equal(['my-policy']) conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.equal(['my-policy'])

File diff suppressed because it is too large

View File

@ -15,7 +15,7 @@ from moto import mock_kinesis, mock_kinesis_deprecated
def test_create_cluster(): def test_create_cluster():
conn = boto.kinesis.connect_to_region("us-west-2") conn = boto.kinesis.connect_to_region("us-west-2")
conn.create_stream("my_stream", 2) conn.create_stream("my_stream", 3)
stream_response = conn.describe_stream("my_stream") stream_response = conn.describe_stream("my_stream")
@ -27,7 +27,7 @@ def test_create_cluster():
stream["StreamStatus"].should.equal("ACTIVE") stream["StreamStatus"].should.equal("ACTIVE")
shards = stream['Shards'] shards = stream['Shards']
shards.should.have.length_of(2) shards.should.have.length_of(3)
@mock_kinesis_deprecated @mock_kinesis_deprecated

View File

@ -110,6 +110,7 @@ def test_rrset():
changes = ResourceRecordSets(conn, zoneid) changes = ResourceRecordSets(conn, zoneid)
changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A") changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A")
changes.add_change("DELETE", "foo.bar.testdns.aws.com", "TXT")
changes.commit() changes.commit()
changes = ResourceRecordSets(conn, zoneid) changes = ResourceRecordSets(conn, zoneid)
@ -123,12 +124,12 @@ def test_rrset():
rrsets.should.have.length_of(2) rrsets.should.have.length_of(2)
rrsets = conn.get_all_rrsets( rrsets = conn.get_all_rrsets(
zoneid, name="foo.bar.testdns.aws.com", type="A") zoneid, name="bar.foo.testdns.aws.com", type="A")
rrsets.should.have.length_of(1) rrsets.should.have.length_of(1)
rrsets[0].resource_records[0].should.equal('1.2.3.4') rrsets[0].resource_records[0].should.equal('5.6.7.8')
rrsets = conn.get_all_rrsets( rrsets = conn.get_all_rrsets(
zoneid, name="bar.foo.testdns.aws.com", type="A") zoneid, name="foo.bar.testdns.aws.com", type="A")
rrsets.should.have.length_of(2) rrsets.should.have.length_of(2)
resource_records = [rr for rr_set in rrsets for rr in rr_set.resource_records] resource_records = [rr for rr_set in rrsets for rr in rr_set.resource_records]
resource_records.should.contain('1.2.3.4') resource_records.should.contain('1.2.3.4')
@ -617,7 +618,7 @@ def test_change_resource_record_sets_crud_valid():
}) })
cname_alias_record_detail.should_not.contain('ResourceRecords') cname_alias_record_detail.should_not.contain('ResourceRecords')
# Delete record. # Delete record with wrong type.
delete_payload = { delete_payload = {
'Comment': 'delete prod.redis.db', 'Comment': 'delete prod.redis.db',
'Changes': [ 'Changes': [
@ -632,6 +633,23 @@ def test_change_resource_record_sets_crud_valid():
} }
conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=delete_payload) conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=delete_payload)
response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id) response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id)
len(response['ResourceRecordSets']).should.equal(1)
# Delete record.
delete_payload = {
'Comment': 'delete prod.redis.db',
'Changes': [
{
'Action': 'DELETE',
'ResourceRecordSet': {
'Name': 'prod.redis.db',
'Type': 'A',
}
}
]
}
conn.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=delete_payload)
response = conn.list_resource_record_sets(HostedZoneId=hosted_zone_id)
len(response['ResourceRecordSets']).should.equal(0) len(response['ResourceRecordSets']).should.equal(0)

View File

@ -32,6 +32,18 @@ def test_create_and_delete_topic():
topics = topics_json["Topics"] topics = topics_json["Topics"]
topics.should.have.length_of(0) topics.should.have.length_of(0)
@mock_sns
def test_create_topic_with_attributes():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name='some-topic-with-attribute', Attributes={'DisplayName': 'test-topic'})
topics_json = conn.list_topics()
topic_arn = topics_json["Topics"][0]['TopicArn']
attributes = conn.get_topic_attributes(TopicArn=topic_arn)['Attributes']
attributes['DisplayName'].should.equal('test-topic')
@mock_sns @mock_sns
def test_create_topic_should_be_indempodent(): def test_create_topic_should_be_indempodent():
conn = boto3.client("sns", region_name="us-east-1") conn = boto3.client("sns", region_name="us-east-1")

update_version_from_git.py Normal file
View File

@ -0,0 +1,118 @@
"""
Adapted from https://github.com/pygame/pygameweb/blob/master/pygameweb/builds/update_version_from_git.py
For updating the version from git.
__init__.py contains a __version__ field.
Update that.
If we are on master, we want to update the version as a pre-release.
git describe --tags
With these:
__init__.py
__version__= '0.0.2'
git describe --tags
0.0.1-22-g729a5ae
We want this:
__init__.py
__version__= '0.0.2.dev22.g729a5ae'
Get the branch/tag name with this.
git symbolic-ref -q --short HEAD || git describe --tags --exact-match
"""
import io
import os
import re
import subprocess
def migrate_source_attribute(attr, to_this, target_file, regex):
"""Updates __magic__ attributes in the source file"""
change_this = re.compile(regex, re.S)
new_file = []
found = False
with open(target_file, 'r') as fp:
lines = fp.readlines()
for line in lines:
if line.startswith(attr):
found = True
line = re.sub(change_this, to_this, line)
new_file.append(line)
if found:
with open(target_file, 'w') as fp:
fp.writelines(new_file)
def migrate_version(target_file, new_version):
"""Updates __version__ in the source file"""
regex = r"['\"](.*)['\"]"
migrate_source_attribute('__version__', "'{new_version}'".format(new_version=new_version), target_file, regex)
def is_master_branch():
cmd = ('git rev-parse --abbrev-ref HEAD')
tag_branch = subprocess.check_output(cmd, shell=True)
return tag_branch in [b'master\n']
def git_tag_name():
cmd = ('git describe --tags')
tag_branch = subprocess.check_output(cmd, shell=True)
tag_branch = tag_branch.decode().strip()
return tag_branch
def get_git_version_info():
cmd = 'git describe --tags'
ver_str = subprocess.check_output(cmd, shell=True)
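# Expected format: <last tag>-<commits since tag>-g<short hash>, e.g. 0.0.1-22-g729a5ae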
ver, commits_since, githash = ver_str.decode().strip().split('-')
return ver, commits_since, githash
def prerelease_version():
""" return what the prerelease version should be.
https://packaging.python.org/tutorials/distributing-packages/#pre-release-versioning
0.0.2.dev22
"""
ver, commits_since, githash = get_git_version_info()
initpy_ver = get_version()
assert len(initpy_ver.split('.')) in [3, 4], 'moto/__init__.py version should be like 0.0.2 or 0.0.2.dev'
assert initpy_ver > ver, 'the moto/__init__.py version should be newer than the last tagged release.'
return '{initpy_ver}.dev{commits_since}'.format(initpy_ver=initpy_ver, commits_since=commits_since)
def read(*parts):
""" Reads in file from *parts.
"""
try:
return io.open(os.path.join(*parts), 'r', encoding='utf-8').read()
except IOError:
return ''
def get_version():
""" Returns version from moto/__init__.py
"""
version_file = read('moto', '__init__.py')
version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
version_file, re.MULTILINE)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
def release_version_correct():
"""Makes sure the:
- prerelease version for master is correct.
- release version is correct for tags.
"""
if is_master_branch():
# update for a pre-release version.
initpy = os.path.abspath("moto/__init__.py")
new_version = prerelease_version()
print('updating version in __init__.py to {new_version}'.format(new_version=new_version))
migrate_version(initpy, new_version)
else:
# check that we are a tag with the same version as in __init__.py
assert get_version() == git_tag_name(), 'git tag/branch name not the same as moto/__init__.py __version__'
if __name__ == '__main__':
release_version_correct()
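For reference, here is a minimal standalone sketch of the version arithmetic described in the docstring above. It is not part of this commit; the function name and the sample strings are assumptions for illustration, and it presumes git describe output of the form <tag>-<commits since>-g<hash>.
def sketch_prerelease_version(describe_output, initpy_version):
    # '0.0.1-22-g729a5ae' -> last tag, commits since that tag, abbreviated hash
    tag, commits_since, githash = describe_output.strip().split('-')
    # The pre-release is based on the (newer) __init__.py version, with the
    # number of commits since the last tag appended as a .dev suffix.
    return '{initpy_ver}.dev{commits_since}'.format(
        initpy_ver=initpy_version, commits_since=commits_since)
assert sketch_prerelease_version('0.0.1-22-g729a5ae', '0.0.2') == '0.0.2.dev22'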