Merge pull request #4 from acsbendi/create-access-key-fix
Create access key fix
commit 94472c3890

26 .travis.yml
@@ -13,9 +13,6 @@ env:
 before_install:
   - export BOTO_CONFIG=/dev/null
 install:
-  # We build moto first so the docker container doesn't try to compile it as well, also note we don't use
-  # -d for docker run so the logs show up in travis
-  # Python images come from here: https://hub.docker.com/_/python/
   - |
     python setup.py sdist
 
@@ -35,3 +32,26 @@ script:
   - make test
 after_success:
   - coveralls
+before_deploy:
+  - git checkout $TRAVIS_BRANCH
+  - git fetch --unshallow
+  - python update_version_from_git.py
+deploy:
+  - provider: pypi
+    distributions: sdist bdist_wheel
+    user: spulec
+    password:
+      secure: NxnPylnTfekJmGyoufCw0lMoYRskSMJzvAIyAlJJVYKwEhmiCPOrdy5qV8i8mRZ1AkUsqU3jBZ/PD56n96clHW0E3d080UleRDj6JpyALVdeLfMqZl9kLmZ8bqakWzYq3VSJKw2zGP/L4tPGf8wTK1SUv9yl/YNDsBdCkjDverw=
+    on:
+      branch:
+        - master
+    skip_cleanup: true
+    skip_existing: true
+  - provider: pypi
+    distributions: sdist bdist_wheel
+    user: spulec
+    password:
+      secure: NxnPylnTfekJmGyoufCw0lMoYRskSMJzvAIyAlJJVYKwEhmiCPOrdy5qV8i8mRZ1AkUsqU3jBZ/PD56n96clHW0E3d080UleRDj6JpyALVdeLfMqZl9kLmZ8bqakWzYq3VSJKw2zGP/L4tPGf8wTK1SUv9yl/YNDsBdCkjDverw=
+    on:
+      tags: true
+    skip_existing: true
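The two `deploy` entries split releases in two: every `master` build uploads a dev build to pypi, while tag builds upload the real release, and `skip_existing: true` keeps re-uploads of an already-published version from failing the build. The contents of `update_version_from_git.py` are not shown in this diff; a minimal sketch of what such a script could do for the dev case, assuming it derives a unique post-release number from the commit count (all of this is an assumption, not the script's actual code):

```python
# Hypothetical sketch: rewrite the in-tree version to a unique dev version
# before the pypi deploy so repeated master uploads don't collide.
import re
import subprocess


def git_commit_count():
    # Needs the full history, hence the `git fetch --unshallow` step above.
    out = subprocess.check_output(['git', 'rev-list', '--count', 'HEAD'])
    return int(out.decode().strip())


def bump_dev_version(path='moto/__init__.py'):
    with open(path) as fp:
        source = fp.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", source, re.MULTILINE)
    dev_version = '{}.dev{}'.format(match.group(1), git_commit_count())
    with open(path, 'w') as fp:
        fp.write(source.replace(match.group(0),
                                "__version__ = '{}'".format(dev_version)))


if __name__ == '__main__':
    bump_dev_version()
```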
@@ -2012,23 +2012,23 @@
 - [ ] upload_archive
 - [ ] upload_multipart_part
 
-## glue - 0% implemented
-- [ ] batch_create_partition
+## glue - 23% implemented
+- [x] batch_create_partition
 - [ ] batch_delete_connection
-- [ ] batch_delete_partition
-- [ ] batch_delete_table
+- [x] batch_delete_partition
+- [x] batch_delete_table
 - [ ] batch_delete_table_version
 - [ ] batch_get_partition
 - [ ] batch_stop_job_run
 - [ ] create_classifier
 - [ ] create_connection
 - [ ] create_crawler
-- [ ] create_database
+- [x] create_database
 - [ ] create_dev_endpoint
 - [ ] create_job
-- [ ] create_partition
+- [x] create_partition
 - [ ] create_script
-- [ ] create_table
+- [x] create_table
 - [ ] create_trigger
 - [ ] create_user_defined_function
 - [ ] delete_classifier
@@ -2037,8 +2037,8 @@
 - [ ] delete_database
 - [ ] delete_dev_endpoint
 - [ ] delete_job
-- [ ] delete_partition
-- [ ] delete_table
+- [x] delete_partition
+- [x] delete_table
 - [ ] delete_table_version
 - [ ] delete_trigger
 - [ ] delete_user_defined_function
@@ -2050,7 +2050,7 @@
 - [ ] get_crawler
 - [ ] get_crawler_metrics
 - [ ] get_crawlers
-- [ ] get_database
+- [x] get_database
 - [ ] get_databases
 - [ ] get_dataflow_graph
 - [ ] get_dev_endpoint
@@ -2060,13 +2060,13 @@
 - [ ] get_job_runs
 - [ ] get_jobs
 - [ ] get_mapping
-- [ ] get_partition
-- [ ] get_partitions
+- [x] get_partition
+- [x] get_partitions
 - [ ] get_plan
-- [ ] get_table
-- [ ] get_table_version
-- [ ] get_table_versions
-- [ ] get_tables
+- [x] get_table
+- [x] get_table_version
+- [x] get_table_versions
+- [x] get_tables
 - [ ] get_trigger
 - [ ] get_triggers
 - [ ] get_user_defined_function
@@ -2087,8 +2087,8 @@
 - [ ] update_database
 - [ ] update_dev_endpoint
 - [ ] update_job
-- [ ] update_partition
-- [ ] update_table
+- [x] update_partition
+- [x] update_table
 - [ ] update_trigger
 - [ ] update_user_defined_function
 
96 README.md
@@ -55,95 +55,95 @@ With the decorator wrapping the test, all the calls to s3 are automatically mock
 It gets even better! Moto isn't just for Python code and it isn't just for S3. Look at the [standalone server mode](https://github.com/spulec/moto#stand-alone-server-mode) for more information about running Moto with other languages. Here's the status of the other AWS services implemented:
 
 ```gherkin
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Service Name | Decorator | Development Status |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | ACM | @mock_acm | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | API Gateway | @mock_apigateway | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Autoscaling | @mock_autoscaling | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Cloudformation | @mock_cloudformation | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Cloudwatch | @mock_cloudwatch | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | CloudwatchEvents | @mock_events | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Cognito Identity | @mock_cognitoidentity | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Cognito Identity Provider | @mock_cognitoidp | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Config | @mock_config | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Data Pipeline | @mock_datapipeline | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | DynamoDB | @mock_dynamodb | core endpoints done |
 | DynamoDB2 | @mock_dynamodb2 | all endpoints + partial indexes |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | EC2 | @mock_ec2 | core endpoints done |
 | - AMI | | core endpoints done |
 | - EBS | | core endpoints done |
 | - Instances | | all endpoints done |
 | - Security Groups | | core endpoints done |
 | - Tags | | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | ECR | @mock_ecr | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | ECS | @mock_ecs | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | ELB | @mock_elb | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | ELBv2 | @mock_elbv2 | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | EMR | @mock_emr | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Glacier | @mock_glacier | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | IAM | @mock_iam | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | IoT | @mock_iot | core endpoints done |
 | | @mock_iotdata | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
+| Kinesis | @mock_kinesis | core endpoints done |
+|-------------------------------------------------------------------------------------|
+| KMS | @mock_kms | basic endpoints done |
+|-------------------------------------------------------------------------------------|
 | Lambda | @mock_lambda | basic endpoints done, requires |
 | | | docker |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Logs | @mock_logs | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
-| Kinesis | @mock_kinesis | core endpoints done |
-|------------------------------------------------------------------------------|
-| KMS | @mock_kms | basic endpoints done |
-|------------------------------------------------------------------------------|
 | Organizations | @mock_organizations | some core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Polly | @mock_polly | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | RDS | @mock_rds | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | RDS2 | @mock_rds2 | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Redshift | @mock_redshift | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | Route53 | @mock_route53 | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | S3 | @mock_s3 | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
-| SecretsManager | @mock_secretsmanager | basic endpoints done
+| SecretsManager | @mock_secretsmanager | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | SES | @mock_ses | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | SNS | @mock_sns | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | SQS | @mock_sqs | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | SSM | @mock_ssm | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | STS | @mock_sts | core endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | SWF | @mock_swf | basic endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 | X-Ray | @mock_xray | all endpoints done |
-|------------------------------------------------------------------------------|
+|-------------------------------------------------------------------------------------|
 ```
 
 For a full list of endpoint [implementation coverage](https://github.com/spulec/moto/blob/master/IMPLEMENTATION_COVERAGE.md)
@@ -318,3 +318,11 @@ boto3.resource(
 ```console
 $ pip install moto
 ```
+
+## Releases
+
+Releases are done from travisci. Fairly closely following this:
+https://docs.travis-ci.com/user/deployment/pypi/
+
+- Commits to `master` branch do a dev deploy to pypi.
+- Commits to a tag do a real deploy to pypi.
@@ -17,66 +17,95 @@ with ``moto`` and its usage.
 Currently implemented Services:
 -------------------------------
 
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Service Name | Decorator | Development Status |
-+=======================+=====================+===================================+
++===========================+=======================+====================================+
+| ACM | @mock_acm | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | API Gateway | @mock_apigateway | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Autoscaling | @mock_autoscaling | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Cloudformation | @mock_cloudformation | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Cloudwatch | @mock_cloudwatch | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| CloudwatchEvents | @mock_events | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
+| Cognito Identity | @mock_cognitoidentity | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
+| Cognito Identity Provider | @mock_cognitoidp | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
+| Config | @mock_config | basic endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | Data Pipeline | @mock_datapipeline | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
-| - DynamoDB | - @mock_dynamodb | - core endpoints done |
-| - DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes|
+| DynamoDB | - @mock_dynamodb | - core endpoints done |
+| DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | EC2 | @mock_ec2 | core endpoints done |
 | - AMI | | - core endpoints done |
 | - EBS | | - core endpoints done |
 | - Instances | | - all endpoints done |
 | - Security Groups | | - core endpoints done |
 | - Tags | | - all endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| ECR | @mock_ecr | basic endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | ECS | @mock_ecs | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | ELB | @mock_elb | core endpoints done |
-| | @mock_elbv2 | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| ELBv2 | @mock_elbv2 | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | EMR | @mock_emr | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Glacier | @mock_glacier | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | IAM | @mock_iam | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
-| Lambda | @mock_lambda | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
+| IoT | @mock_iot | core endpoints done |
+| | @mock_iotdata | core endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | Kinesis | @mock_kinesis | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | KMS | @mock_kms | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| Lambda | @mock_lambda | basic endpoints done, |
+| | | requires docker |
++---------------------------+-----------------------+------------------------------------+
+| Logs | @mock_logs | basic endpoints done |
++---------------------------+-----------------------+------------------------------------+
+| Organizations | @mock_organizations | some core edpoints done |
++---------------------------+-----------------------+------------------------------------+
+| Polly | @mock_polly | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | RDS | @mock_rds | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | RDS2 | @mock_rds2 | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Redshift | @mock_redshift | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | Route53 | @mock_route53 | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | S3 | @mock_s3 | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
-| SES | @mock_ses | core endpoints done |
+| SecretsManager | @mock_secretsmanager | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
-| SNS | @mock_sns | core endpoints done |
+| SES | @mock_ses | all endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| SNS | @mock_sns | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | SQS | @mock_sqs | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| SSM | @mock_ssm | core endpoints done |
++---------------------------+-----------------------+------------------------------------+
 | STS | @mock_sts | core endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
 | SWF | @mock_swf | basic endpoints done |
-+-----------------------+---------------------+-----------------------------------+
++---------------------------+-----------------------+------------------------------------+
+| X-Ray | @mock_xray | all endpoints done |
++---------------------------+-----------------------+------------------------------------+
 
 
 
@@ -3,7 +3,7 @@ import logging
 # logging.getLogger('boto').setLevel(logging.CRITICAL)
 
 __title__ = 'moto'
-__version__ = '1.3.8'
+__version__ = '1.3.9'
 
 from .acm import mock_acm  # flake8: noqa
 from .apigateway import mock_apigateway, mock_apigateway_deprecated  # flake8: noqa
@@ -246,7 +246,8 @@ def resource_name_property_from_type(resource_type):
 
 
 def generate_resource_name(resource_type, stack_name, logical_id):
-    if resource_type == "AWS::ElasticLoadBalancingV2::TargetGroup":
+    if resource_type in ["AWS::ElasticLoadBalancingV2::TargetGroup",
+                         "AWS::ElasticLoadBalancingV2::LoadBalancer"]:
         # Target group names need to be less than 32 characters, so when cloudformation creates a name for you
         # it makes sure to stay under that limit
         name_prefix = '{0}-{1}'.format(stack_name, logical_id)
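The condition now covers ELBv2 load balancers as well as target groups, since both resource types share the 32-character name limit. The truncation itself is outside this hunk, so the following is only an illustration of the constraint under the assumption that a random suffix gets appended, not moto's exact rule:

```python
def short_resource_name(stack_name, logical_id, suffix):
    # ELBv2 target group *and* load balancer names are capped at 32
    # characters, so trim the generated prefix to leave room for the suffix.
    name_prefix = '{0}-{1}'.format(stack_name, logical_id)
    return name_prefix[:32 - len(suffix) - 1] + '-' + suffix


# 'my-rather-long-stack-name-WebTG' is trimmed so the result is 32 chars.
print(short_resource_name('my-rather-long-stack-name', 'WebTG', 'a1b2c3'))
```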
@@ -4,6 +4,7 @@ import six
 import random
 import yaml
 import os
+import string
 
 from cfnlint import decode, core
 
@@ -29,7 +30,7 @@ def generate_stackset_arn(stackset_id, region_name):
 
 def random_suffix():
     size = 12
-    chars = list(range(10)) + ['A-Z']
+    chars = list(range(10)) + list(string.ascii_uppercase)
     return ''.join(six.text_type(random.choice(chars)) for x in range(size))
 
 
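The `random_suffix` fix is worth spelling out: `['A-Z']` is a one-element list containing the literal three-character string `'A-Z'`, not the range of uppercase letters, so the old pool could only ever emit digits and the token `A-Z` itself. A standalone sketch of the difference:

```python
import random
import string

# Old pool: the ten digits plus the literal string 'A-Z'.
old_chars = list(range(10)) + ['A-Z']
# New pool: the ten digits plus the 26 individual uppercase letters.
new_chars = list(range(10)) + list(string.ascii_uppercase)

# Twelve draws from the old pool, e.g. '4A-Z82A-Z1903' -- the 'A-Z' token
# inflates the string past twelve characters and contains no random letters.
print(''.join(str(random.choice(old_chars)) for _ in range(12)))
# Twelve draws from the new pool, e.g. '4K82Q1R903MZ'.
print(''.join(str(random.choice(new_chars)) for _ in range(12)))
```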
@@ -724,7 +724,7 @@ class Table(BaseModel):
             if idx_col_set.issubset(set(hash_set.attrs)):
                 yield hash_set
 
-    def scan(self, filters, limit, exclusive_start_key, filter_expression=None, index_name=None):
+    def scan(self, filters, limit, exclusive_start_key, filter_expression=None, index_name=None, projection_expression=None):
         results = []
         scanned_count = 0
         all_indexes = self.all_indexes()
@@ -763,6 +763,14 @@ class Table(BaseModel):
             if passes_all_conditions:
                 results.append(item)
 
+        if projection_expression:
+            expressions = [x.strip() for x in projection_expression.split(',')]
+            results = copy.deepcopy(results)
+            for result in results:
+                for attr in list(result.attrs):
+                    if attr not in expressions:
+                        result.attrs.pop(attr)
+
         results, last_evaluated_key = self._trim_results(results, limit,
                                                          exclusive_start_key, index_name)
         return results, scanned_count, last_evaluated_key
@ -962,7 +970,7 @@ class DynamoDBBackend(BaseBackend):
|
|||||||
return table.query(hash_key, range_comparison, range_values, limit,
|
return table.query(hash_key, range_comparison, range_values, limit,
|
||||||
exclusive_start_key, scan_index_forward, projection_expression, index_name, filter_expression, **filter_kwargs)
|
exclusive_start_key, scan_index_forward, projection_expression, index_name, filter_expression, **filter_kwargs)
|
||||||
|
|
||||||
def scan(self, table_name, filters, limit, exclusive_start_key, filter_expression, expr_names, expr_values, index_name):
|
def scan(self, table_name, filters, limit, exclusive_start_key, filter_expression, expr_names, expr_values, index_name, projection_expression):
|
||||||
table = self.tables.get(table_name)
|
table = self.tables.get(table_name)
|
||||||
if not table:
|
if not table:
|
||||||
return None, None, None
|
return None, None, None
|
||||||
@ -977,7 +985,9 @@ class DynamoDBBackend(BaseBackend):
|
|||||||
else:
|
else:
|
||||||
filter_expression = Op(None, None) # Will always eval to true
|
filter_expression = Op(None, None) # Will always eval to true
|
||||||
|
|
||||||
return table.scan(scan_filters, limit, exclusive_start_key, filter_expression, index_name)
|
projection_expression = ','.join([expr_names.get(attr, attr) for attr in projection_expression.replace(' ', '').split(',')])
|
||||||
|
|
||||||
|
return table.scan(scan_filters, limit, exclusive_start_key, filter_expression, index_name, projection_expression)
|
||||||
|
|
||||||
def update_item(self, table_name, key, update_expression, attribute_updates, expression_attribute_names,
|
def update_item(self, table_name, key, update_expression, attribute_updates, expression_attribute_names,
|
||||||
expression_attribute_values, expected=None):
|
expression_attribute_values, expected=None):
|
||||||
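Before handing the projection expression down to `Table.scan`, the backend substitutes any `#`-prefixed expression attribute names with their real names. That one-liner in isolation:

```python
expr_names = {'#rl': 'body', '#rt': 'attachment'}
projection_expression = '#rl, #rt, subject'

resolved = ','.join(
    expr_names.get(attr, attr)  # swap an alias for its real name, keep plain names
    for attr in projection_expression.replace(' ', '').split(',')
)
print(resolved)  # body,attachment,subject
```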
@@ -166,7 +166,7 @@ class DynamoHandler(BaseResponse):
                                   when BillingMode is PAY_PER_REQUEST')
             throughput = None
         else:  # Provisioned (default billing mode)
-            throughput = body["ProvisionedThroughput"]
+            throughput = body.get("ProvisionedThroughput")
         # getting the schema
         key_schema = body['KeySchema']
         # getting attribute definition
@ -558,7 +558,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
filter_expression = self.body.get('FilterExpression')
|
filter_expression = self.body.get('FilterExpression')
|
||||||
expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
|
expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
|
||||||
expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
|
expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
|
||||||
|
projection_expression = self.body.get('ProjectionExpression', '')
|
||||||
exclusive_start_key = self.body.get('ExclusiveStartKey')
|
exclusive_start_key = self.body.get('ExclusiveStartKey')
|
||||||
limit = self.body.get("Limit")
|
limit = self.body.get("Limit")
|
||||||
index_name = self.body.get('IndexName')
|
index_name = self.body.get('IndexName')
|
||||||
@ -570,7 +570,8 @@ class DynamoHandler(BaseResponse):
|
|||||||
filter_expression,
|
filter_expression,
|
||||||
expression_attribute_names,
|
expression_attribute_names,
|
||||||
expression_attribute_values,
|
expression_attribute_values,
|
||||||
index_name)
|
index_name,
|
||||||
|
projection_expression)
|
||||||
except InvalidIndexNameError as err:
|
except InvalidIndexNameError as err:
|
||||||
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
|
er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
|
||||||
return self.error(er, str(err))
|
return self.error(er, str(err))
|
||||||
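With the handler wired up, `ProjectionExpression` now works on `Scan` the way it already did on `Query`. The tests added further down in this commit exercise this through the boto3 resource API; a minimal client-level sketch of the same behaviour (table and attribute names here are illustrative only):

```python
import boto3
from moto import mock_dynamodb2


@mock_dynamodb2
def scan_with_projection():
    client = boto3.client('dynamodb', region_name='us-east-1')
    client.create_table(
        TableName='users',
        KeySchema=[{'AttributeName': 'forum_name', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'forum_name', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5},
    )
    client.put_item(TableName='users',
                    Item={'forum_name': {'S': 'the-key'}, 'body': {'S': 'hi'}})
    # '#b' is resolved to 'body' by the expr_names substitution shown above.
    resp = client.scan(TableName='users',
                       ProjectionExpression='#b',
                       ExpressionAttributeNames={'#b': 'body'})
    assert list(resp['Items'][0]) == ['body']


scan_with_projection()
```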
@@ -403,7 +403,10 @@ class ECRBackend(BaseBackend):
                     image_found = True
                     repository.images[num].image_tag = image_id["imageTag"]
                     response["imageIds"].append(image.response_batch_delete_image)
+                    if len(image.image_tags) > 1:
                         repository.images[num].remove_tag(image_id["imageTag"])
+                    else:
+                        repository.images.remove(image)
 
         if not image_found:
             failure_response = {
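The new branch distinguishes untagging from deletion: batch-deleting by tag removes the whole image only when that tag was its last one; otherwise just the tag is dropped. A self-contained sketch of the rule, with `FakeImage` as a stand-in for moto's image objects:

```python
class FakeImage:
    def __init__(self, tags):
        self.image_tags = list(tags)

    def remove_tag(self, tag):
        self.image_tags.remove(tag)


def delete_by_tag(images, image, tag):
    # Remove the whole image only when `tag` was its last remaining tag.
    if len(image.image_tags) > 1:
        image.remove_tag(tag)   # other tags still point here; keep the image
    else:
        images.remove(image)    # last tag gone; drop the image entirely


images = [FakeImage(['v1', 'latest']), FakeImage(['v2'])]
delete_by_tag(images, images[0], 'latest')  # untags only
delete_by_tag(images, images[1], 'v2')      # deletes the image
print(len(images), images[0].image_tags)    # 1 ['v1']
```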
@@ -138,6 +138,12 @@ class FakeTable(BaseModel):
             raise PartitionAlreadyExistsException()
         self.partitions[key] = partition
 
+    def delete_partition(self, values):
+        try:
+            del self.partitions[str(values)]
+        except KeyError:
+            raise PartitionNotFoundException()
+
 
 class FakePartition(BaseModel):
     def __init__(self, database_name, table_name, partiton_input):
@@ -6,6 +6,7 @@ from moto.core.responses import BaseResponse
 from .models import glue_backend
 from .exceptions import (
     PartitionAlreadyExistsException,
+    PartitionNotFoundException,
     TableNotFoundException
 )
 
@@ -184,3 +185,38 @@ class GlueResponse(BaseResponse):
         table.update_partition(part_to_update, part_input)
 
         return ""
+
+    def delete_partition(self):
+        database_name = self.parameters.get('DatabaseName')
+        table_name = self.parameters.get('TableName')
+        part_to_delete = self.parameters.get('PartitionValues')
+
+        table = self.glue_backend.get_table(database_name, table_name)
+        table.delete_partition(part_to_delete)
+
+        return ""
+
+    def batch_delete_partition(self):
+        database_name = self.parameters.get('DatabaseName')
+        table_name = self.parameters.get('TableName')
+        table = self.glue_backend.get_table(database_name, table_name)
+
+        errors_output = []
+        for part_input in self.parameters.get('PartitionsToDelete'):
+            values = part_input.get('Values')
+            try:
+                table.delete_partition(values)
+            except PartitionNotFoundException:
+                errors_output.append({
+                    'PartitionValues': values,
+                    'ErrorDetail': {
+                        'ErrorCode': 'EntityNotFoundException',
+                        'ErrorMessage': 'Partition not found',
+                    }
+                })
+
+        out = {}
+        if errors_output:
+            out['Errors'] = errors_output
+
+        return json.dumps(out)
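Like the real Glue API, `batch_delete_partition` doesn't raise on a missing partition; failures are collected into an `Errors` list while the rest of the batch proceeds. A hedged sketch of driving it through boto3, with the database and table inputs trimmed down to roughly the minimum moto needs (the real service requires richer `TableInput`):

```python
import boto3
from moto import mock_glue


@mock_glue
def batch_delete_demo():
    client = boto3.client('glue', region_name='us-east-1')
    client.create_database(DatabaseInput={'Name': 'db'})
    client.create_table(DatabaseName='db', TableInput={
        'Name': 'tbl',
        'PartitionKeys': [{'Name': 'dt', 'Type': 'string'}],
    })
    client.create_partition(DatabaseName='db', TableName='tbl',
                            PartitionInput={'Values': ['2019-01-01']})
    resp = client.batch_delete_partition(
        DatabaseName='db', TableName='tbl',
        PartitionsToDelete=[
            {'Values': ['2019-01-01']},  # exists: deleted, no error entry
            {'Values': ['2019-01-02']},  # missing: collected under Errors
        ],
    )
    assert resp['Errors'][0]['ErrorDetail']['ErrorCode'] == 'EntityNotFoundException'


batch_delete_demo()
```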
@@ -299,8 +299,8 @@ class AccessKey(BaseModel):
 
     def __init__(self, user_name):
         self.user_name = user_name
-        self.access_key_id = random_access_key()
-        self.secret_access_key = random_alphanumeric(32)
+        self.access_key_id = "AKIA" + random_access_key()
+        self.secret_access_key = random_alphanumeric(40)
         self.status = 'Active'
         self.create_date = datetime.utcnow()
         self.last_used = datetime.utcnow()
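This is the fix the PR is named for: generated key IDs now carry the real-world `AKIA` prefix and secrets are 40 characters long, matching what AWS actually issues, so client-side format validation no longer trips over moto's fakes. A sketch of the kind of check this enables (together with the `CreateDate` field added to the response template below):

```python
import boto3
from moto import mock_iam


@mock_iam
def test_access_key_looks_real():
    client = boto3.client('iam', region_name='us-east-1')
    client.create_user(UserName='alice')
    key = client.create_access_key(UserName='alice')['AccessKey']
    assert key['AccessKeyId'].startswith('AKIA')  # real AWS key-id prefix
    assert len(key['SecretAccessKey']) == 40      # real AWS secret length
    assert 'CreateDate' in key                    # added by the template change below


test_access_key_looks_real()
```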
@@ -89,8 +89,8 @@ VALID_RESOURCE_PATH_STARTING_VALUES = {
 class IAMPolicyDocumentValidator:
 
     def __init__(self, policy_document):
-        self._policy_document: str = policy_document
-        self._policy_json: dict = {}
+        self._policy_document = policy_document
+        self._policy_json = {}
         self._statements = []
         self._resource_error = ""  # the first resource error found that does not generate a legacy parsing error
 
@@ -1493,6 +1493,7 @@ CREATE_ACCESS_KEY_TEMPLATE = """<CreateAccessKeyResponse>
          <AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
          <Status>{{ key.status }}</Status>
          <SecretAccessKey>{{ key.secret_access_key }}</SecretAccessKey>
+         <CreateDate>{{ key.created_iso_8601 }}</CreateDate>
       </AccessKey>
    </CreateAccessKeyResult>
    <ResponseMetadata>
@@ -7,7 +7,7 @@ import six
 def random_alphanumeric(length):
     return ''.join(six.text_type(
         random.choice(
-            string.ascii_letters + string.digits
+            string.ascii_letters + string.digits + "+" + "/"
         )) for _ in range(length)
     )
 
@@ -123,17 +123,12 @@ class Stream(BaseModel):
         self.tags = {}
         self.status = "ACTIVE"
 
-        if six.PY3:
-            izip_longest = itertools.zip_longest
-        else:
-            izip_longest = itertools.izip_longest
-
-        for index, start, end in izip_longest(range(shard_count),
-                                              range(0, 2**128, 2 **
-                                                    128 // shard_count),
-                                              range(2**128 // shard_count, 2 **
-                                                    128, 2**128 // shard_count),
-                                              fillvalue=2**128):
+        step = 2**128 // shard_count
+        hash_ranges = itertools.chain(map(lambda i: (i, i * step, (i + 1) * step),
+                                          range(shard_count - 1)),
+                                      [(shard_count - 1, (shard_count - 1) * step, 2**128)])
+        for index, start, end in hash_ranges:
+
             shard = Shard(index, start, end)
             self.shards[shard.shard_id] = shard
 
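The rewrite makes the shard math explicit: the 2^128 hash-key space is cut into `shard_count` equal steps, with the final shard stretched to the exact top of the range. When `shard_count` doesn't divide 2^128 evenly, the old `izip_longest` construction could yield an extra phantom shard from the leftover range element. The new partitioning in isolation:

```python
def shard_hash_ranges(shard_count, space=2**128):
    # Equal-width steps; the last shard absorbs the rounding remainder so
    # the ranges tile [0, space) exactly, with no extra shard.
    step = space // shard_count
    ranges = [(i, i * step, (i + 1) * step) for i in range(shard_count - 1)]
    ranges.append((shard_count - 1, (shard_count - 1) * step, space))
    return ranges


for index, start, end in shard_hash_ranges(3):
    print(index, end - start)  # the last width is larger by 2**128 % 3
```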
@@ -165,6 +165,12 @@ class RecordSet(BaseModel):
         hosted_zone.delete_rrset_by_name(self.name)
 
 
+def reverse_domain_name(domain_name):
+    if domain_name.endswith('.'):  # normalize without trailing dot
+        domain_name = domain_name[:-1]
+    return '.'.join(reversed(domain_name.split('.')))
+
+
 class FakeZone(BaseModel):
 
     def __init__(self, name, id_, private_zone, comment=None):
@@ -200,12 +206,15 @@ class FakeZone(BaseModel):
 
     def get_record_sets(self, start_type, start_name):
         record_sets = list(self.rrsets)  # Copy the list
+        if start_name:
+            record_sets = [
+                record_set
+                for record_set in record_sets
+                if reverse_domain_name(record_set.name) >= reverse_domain_name(start_name)
+            ]
         if start_type:
             record_sets = [
                 record_set for record_set in record_sets if record_set.type_ >= start_type]
-        if start_name:
-            record_sets = [
-                record_set for record_set in record_sets if record_set.name >= start_name]
 
         return record_sets
 
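`reverse_domain_name` exists because Route53 lists record sets in DNS order, which compares labels from the TLD inwards; the old plain string comparison on raw names gets that wrong for subdomains. A small demonstration of the difference:

```python
def reverse_domain_name(domain_name):
    # 'www.example.com.' -> 'com.example.www' (trailing dot normalized away)
    if domain_name.endswith('.'):
        domain_name = domain_name[:-1]
    return '.'.join(reversed(domain_name.split('.')))


names = ['aaa.example.com.', 'example.com.', 'zzz.example.com.', 'example.net.']
# Lexicographic order puts 'aaa.example.com.' before its parent zone apex,
# so the old `record_set.name >= start_name` filter would wrongly drop it
# when paginating from start_name='example.com.'.
print(sorted(names))
# DNS order keeps 'example.com.' first, then its subdomains, then example.net.
print(sorted(names, key=reverse_domain_name))
```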
13 setup.py
@@ -18,6 +18,15 @@ def read(*parts):
         return fp.read()
 
 
+def get_version():
+    version_file = read('moto', '__init__.py')
+    version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
+                              version_file, re.MULTILINE)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError('Unable to find version string.')
+
+
 install_requires = [
     "Jinja2>=2.10.1",
     "boto>=2.36.0",
@@ -29,7 +38,7 @@ install_requires = [
     "xmltodict",
     "six>1.9",
     "werkzeug",
-    "PyYAML==3.13",
+    "PyYAML",
     "pytz",
     "python-dateutil<3.0.0,>=2.1",
     "python-jose<4.0.0",
@@ -57,7 +66,7 @@ else:
 
 setup(
     name='moto',
-    version='1.3.8',
+    version=get_version(),
     description='A library that allows your python tests to easily'
                 ' mock out the boto library',
     long_description=read('README.md'),
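With `get_version()`, the version string lives in exactly one place, `moto/__init__.py`: `setup.py` reads it instead of hard-coding `'1.3.8'`, which is what lets the travis `before_deploy` step rewrite a single file. The regex pull in isolation:

```python
import re

# Stand-in for the contents of moto/__init__.py after the bump above.
version_file = "__title__ = 'moto'\n__version__ = '1.3.9'\n"
match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
                  version_file, re.MULTILINE)
print(match.group(1))  # 1.3.9
```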
@@ -452,6 +452,90 @@ def test_basic_projection_expressions():
     assert 'body' in results['Items'][1]
     assert 'forum_name' in results['Items'][1]
 
+@mock_dynamodb2
+def test_basic_projection_expressions_using_scan():
+    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+    table = dynamodb.create_table(
+        TableName='users',
+        KeySchema=[
+            {
+                'AttributeName': 'forum_name',
+                'KeyType': 'HASH'
+            },
+            {
+                'AttributeName': 'subject',
+                'KeyType': 'RANGE'
+            },
+        ],
+        AttributeDefinitions=[
+            {
+                'AttributeName': 'forum_name',
+                'AttributeType': 'S'
+            },
+            {
+                'AttributeName': 'subject',
+                'AttributeType': 'S'
+            },
+        ],
+        ProvisionedThroughput={
+            'ReadCapacityUnits': 5,
+            'WriteCapacityUnits': 5
+        }
+    )
+    table = dynamodb.Table('users')
+
+    table.put_item(Item={
+        'forum_name': 'the-key',
+        'subject': '123',
+        'body': 'some test message'
+    })
+
+    table.put_item(Item={
+        'forum_name': 'not-the-key',
+        'subject': '123',
+        'body': 'some other test message'
+    })
+    # Test a scan returning all items
+    results = table.scan(
+        FilterExpression=Key('forum_name').eq(
+            'the-key'),
+        ProjectionExpression='body, subject'
+    )
+
+    assert 'body' in results['Items'][0]
+    assert results['Items'][0]['body'] == 'some test message'
+    assert 'subject' in results['Items'][0]
+
+    table.put_item(Item={
+        'forum_name': 'the-key',
+        'subject': '1234',
+        'body': 'yet another test message'
+    })
+
+    results = table.scan(
+        FilterExpression=Key('forum_name').eq(
+            'the-key'),
+        ProjectionExpression='body'
+    )
+
+    assert 'body' in results['Items'][0]
+    assert 'subject' not in results['Items'][0]
+    assert 'forum_name' not in results['Items'][0]
+    assert 'body' in results['Items'][1]
+    assert 'subject' not in results['Items'][1]
+    assert 'forum_name' not in results['Items'][1]
+
+    # The projection expression should not remove data from storage
+    results = table.query(
+        KeyConditionExpression=Key('forum_name').eq(
+            'the-key'),
+    )
+    assert 'subject' in results['Items'][0]
+    assert 'body' in results['Items'][1]
+    assert 'forum_name' in results['Items'][1]
+
+
 @mock_dynamodb2
 def test_basic_projection_expressions_with_attr_expression_names():
@@ -519,6 +603,84 @@ def test_basic_projection_expressions_with_attr_expression_names():
     assert 'attachment' in results['Items'][0]
     assert results['Items'][0]['attachment'] == 'something'
 
+@mock_dynamodb2
+def test_basic_projection_expressions_using_scan_with_attr_expression_names():
+    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
+
+    # Create the DynamoDB table.
+    table = dynamodb.create_table(
+        TableName='users',
+        KeySchema=[
+            {
+                'AttributeName': 'forum_name',
+                'KeyType': 'HASH'
+            },
+            {
+                'AttributeName': 'subject',
+                'KeyType': 'RANGE'
+            },
+        ],
+        AttributeDefinitions=[
+            {
+                'AttributeName': 'forum_name',
+                'AttributeType': 'S'
+            },
+            {
+                'AttributeName': 'subject',
+                'AttributeType': 'S'
+            },
+        ],
+        ProvisionedThroughput={
+            'ReadCapacityUnits': 5,
+            'WriteCapacityUnits': 5
+        }
+    )
+    table = dynamodb.Table('users')
+
+    table.put_item(Item={
+        'forum_name': 'the-key',
+        'subject': '123',
+        'body': 'some test message',
+        'attachment': 'something'
+    })
+
+    table.put_item(Item={
+        'forum_name': 'not-the-key',
+        'subject': '123',
+        'body': 'some other test message',
+        'attachment': 'something'
+    })
+    # Test a scan returning all items
+
+    results = table.scan(
+        FilterExpression=Key('forum_name').eq(
+            'the-key'),
+        ProjectionExpression='#rl, #rt, subject',
+        ExpressionAttributeNames={
+            '#rl': 'body',
+            '#rt': 'attachment'
+        },
+    )
+
+    assert 'body' in results['Items'][0]
+    assert 'attachment' in results['Items'][0]
+    assert 'subject' in results['Items'][0]
+    assert 'form_name' not in results['Items'][0]
+
+    # Test without a FilterExpression
+    results = table.scan(
+        ProjectionExpression='#rl, #rt, subject',
+        ExpressionAttributeNames={
+            '#rl': 'body',
+            '#rt': 'attachment'
+        },
+    )
+
+    assert 'body' in results['Items'][0]
+    assert 'attachment' in results['Items'][0]
+    assert 'subject' in results['Items'][0]
+    assert 'form_name' not in results['Items'][0]
+
+
 @mock_dynamodb2
 def test_put_item_returns_consumed_capacity():
|
@ -740,7 +740,7 @@ def test_batch_get_image_no_tags():
|
|||||||
@mock_ecr
|
@mock_ecr
|
||||||
def test_batch_delete_image_by_tag():
|
def test_batch_delete_image_by_tag():
|
||||||
client = boto3.client('ecr', region_name='us-east-1')
|
client = boto3.client('ecr', region_name='us-east-1')
|
||||||
_ = client.create_repository(
|
client.create_repository(
|
||||||
repositoryName='test_repository'
|
repositoryName='test_repository'
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -748,14 +748,13 @@ def test_batch_delete_image_by_tag():
|
|||||||
|
|
||||||
tags = ['v1', 'v1.0', 'latest']
|
tags = ['v1', 'v1.0', 'latest']
|
||||||
for tag in tags:
|
for tag in tags:
|
||||||
put_response = client.put_image(
|
client.put_image(
|
||||||
repositoryName='test_repository',
|
repositoryName='test_repository',
|
||||||
imageManifest=json.dumps(manifest),
|
imageManifest=json.dumps(manifest),
|
||||||
imageTag=tag,
|
imageTag=tag,
|
||||||
)
|
)
|
||||||
|
|
||||||
describe_response1 = client.describe_images(repositoryName='test_repository')
|
describe_response1 = client.describe_images(repositoryName='test_repository')
|
||||||
image_digest = describe_response1['imageDetails'][0]['imageDigest']
|
|
||||||
|
|
||||||
batch_delete_response = client.batch_delete_image(
|
batch_delete_response = client.batch_delete_image(
|
||||||
registryId='012345678910',
|
registryId='012345678910',
|
||||||
@ -784,10 +783,52 @@ def test_batch_delete_image_by_tag():
|
|||||||
len(batch_delete_response['failures']).should.be(0)
|
len(batch_delete_response['failures']).should.be(0)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_ecr
|
||||||
|
def test_batch_delete_image_delete_last_tag():
|
||||||
|
client = boto3.client('ecr', region_name='us-east-1')
|
||||||
|
client.create_repository(
|
||||||
|
repositoryName='test_repository'
|
||||||
|
)
|
||||||
|
|
||||||
|
client.put_image(
|
||||||
|
repositoryName='test_repository',
|
||||||
|
imageManifest=json.dumps(_create_image_manifest()),
|
||||||
|
imageTag='v1',
|
||||||
|
)
|
||||||
|
|
||||||
|
describe_response1 = client.describe_images(repositoryName='test_repository')
|
||||||
|
|
||||||
|
batch_delete_response = client.batch_delete_image(
|
||||||
|
registryId='012345678910',
|
||||||
|
repositoryName='test_repository',
|
||||||
|
imageIds=[
|
||||||
|
{
|
||||||
|
'imageTag': 'v1'
|
||||||
|
},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
describe_response2 = client.describe_images(repositoryName='test_repository')
|
||||||
|
|
||||||
|
type(describe_response1['imageDetails'][0]['imageTags']).should.be(list)
|
||||||
|
len(describe_response1['imageDetails'][0]['imageTags']).should.be(1)
|
||||||
|
|
||||||
|
type(describe_response2['imageDetails']).should.be(list)
|
||||||
|
len(describe_response2['imageDetails']).should.be(0)
|
||||||
|
|
||||||
|
type(batch_delete_response['imageIds']).should.be(list)
|
||||||
|
len(batch_delete_response['imageIds']).should.be(1)
|
||||||
|
|
||||||
|
batch_delete_response['imageIds'][0]['imageTag'].should.equal("v1")
|
||||||
|
|
||||||
|
type(batch_delete_response['failures']).should.be(list)
|
||||||
|
len(batch_delete_response['failures']).should.be(0)
|
||||||
|
|
||||||
|
|
||||||
@mock_ecr
def test_batch_delete_image_with_nonexistent_tag():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -795,14 +836,13 @@ def test_batch_delete_image_with_nonexistent_tag():

    tags = ['v1', 'v1.0', 'latest']
    for tag in tags:
-        put_response = client.put_image(
+        client.put_image(
            repositoryName='test_repository',
            imageManifest=json.dumps(manifest),
            imageTag=tag,
        )

    describe_response = client.describe_images(repositoryName='test_repository')
-    image_digest = describe_response['imageDetails'][0]['imageDigest']

    missing_tag = "missing-tag"
    batch_delete_response = client.batch_delete_image(
@@ -832,7 +872,7 @@ def test_batch_delete_image_with_nonexistent_tag():

@mock_ecr
def test_batch_delete_image_by_digest():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -840,7 +880,7 @@ def test_batch_delete_image_by_digest():

    tags = ['v1', 'v2', 'latest']
    for tag in tags:
-        put_response = client.put_image(
+        client.put_image(
            repositoryName='test_repository',
            imageManifest=json.dumps(manifest),
            imageTag=tag
@@ -883,7 +923,7 @@ def test_batch_delete_image_by_digest():

@mock_ecr
def test_batch_delete_image_with_invalid_digest():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -891,13 +931,12 @@ def test_batch_delete_image_with_invalid_digest():

    tags = ['v1', 'v2', 'latest']
    for tag in tags:
-        put_response = client.put_image(
+        client.put_image(
            repositoryName='test_repository',
            imageManifest=json.dumps(manifest),
            imageTag=tag
        )

-    describe_response = client.describe_images(repositoryName='test_repository')
    invalid_image_digest = 'sha256:invalid-digest'

    batch_delete_response = client.batch_delete_image(
@@ -924,7 +963,7 @@ def test_batch_delete_image_with_invalid_digest():

@mock_ecr
def test_batch_delete_image_with_missing_parameters():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -950,7 +989,7 @@ def test_batch_delete_image_with_missing_parameters():

@mock_ecr
def test_batch_delete_image_with_matching_digest_and_tag():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -958,7 +997,7 @@ def test_batch_delete_image_with_matching_digest_and_tag():

    tags = ['v1', 'v1.0', 'latest']
    for tag in tags:
-        put_response = client.put_image(
+        client.put_image(
            repositoryName='test_repository',
            imageManifest=json.dumps(manifest),
            imageTag=tag
@@ -1002,7 +1041,7 @@ def test_batch_delete_image_with_matching_digest_and_tag():

@mock_ecr
def test_batch_delete_image_with_mismatched_digest_and_tag():
    client = boto3.client('ecr', region_name='us-east-1')
-    _ = client.create_repository(
+    client.create_repository(
        repositoryName='test_repository'
    )

@@ -1010,7 +1049,7 @@ def test_batch_delete_image_with_mismatched_digest_and_tag():

    tags = ['v1', 'latest']
    for tag in tags:
-        put_response = client.put_image(
+        client.put_image(
            repositoryName='test_repository',
            imageManifest=json.dumps(manifest),
            imageTag=tag
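One cleanup recurs through all of the ECR hunks above: assignments such as _ = client.create_repository(...) and put_response = client.put_image(...) bound response dicts that no assertion ever read, so the bindings are dropped and the calls are kept purely for their side effects on the mock registry. The pattern, side by side (taken from the hunks themselves):

    # before: response bound but never used
    put_response = client.put_image(
        repositoryName='test_repository',
        imageManifest=json.dumps(manifest),
        imageTag=tag,
    )

    # after: same call, executed only for its effect
    client.put_image(
        repositoryName='test_repository',
        imageManifest=json.dumps(manifest),
        imageTag=tag,
    )

The same reasoning removes the unused image_digest and describe_response lookups in the tag-based tests.
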
@@ -531,3 +531,112 @@ def test_update_partition_move():

    partition['TableName'].should.equal(table_name)
    partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
+
+
+@mock_glue
+def test_delete_partition():
+    client = boto3.client('glue', region_name='us-east-1')
+    database_name = 'myspecialdatabase'
+    table_name = 'myfirsttable'
+    values = ['2018-10-01']
+    helpers.create_database(client, database_name)
+    helpers.create_table(client, database_name, table_name)
+
+    part_input = helpers.create_partition_input(database_name, table_name, values=values)
+    helpers.create_partition(client, database_name, table_name, part_input)
+
+    client.delete_partition(
+        DatabaseName=database_name,
+        TableName=table_name,
+        PartitionValues=values,
+    )
+
+    response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
+    partitions = response['Partitions']
+    partitions.should.be.empty
+
+
+@mock_glue
+def test_delete_partition_bad_partition():
+    client = boto3.client('glue', region_name='us-east-1')
+    database_name = 'myspecialdatabase'
+    table_name = 'myfirsttable'
+    values = ['2018-10-01']
+    helpers.create_database(client, database_name)
+    helpers.create_table(client, database_name, table_name)
+
+    with assert_raises(ClientError) as exc:
+        client.delete_partition(
+            DatabaseName=database_name,
+            TableName=table_name,
+            PartitionValues=values,
+        )
+
+    exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
+
+
+@mock_glue
+def test_batch_delete_partition():
+    client = boto3.client('glue', region_name='us-east-1')
+    database_name = 'myspecialdatabase'
+    table_name = 'myfirsttable'
+    helpers.create_database(client, database_name)
+    helpers.create_table(client, database_name, table_name)
+
+    partition_inputs = []
+    for i in range(0, 20):
+        values = ["2018-10-{:2}".format(i)]
+        part_input = helpers.create_partition_input(database_name, table_name, values=values)
+        partition_inputs.append(part_input)
+
+    client.batch_create_partition(
+        DatabaseName=database_name,
+        TableName=table_name,
+        PartitionInputList=partition_inputs
+    )
+
+    partition_values = [{"Values": p["Values"]} for p in partition_inputs]
+
+    response = client.batch_delete_partition(
+        DatabaseName=database_name,
+        TableName=table_name,
+        PartitionsToDelete=partition_values,
+    )
+
+    response.should_not.have.key('Errors')
+
+
+@mock_glue
+def test_batch_delete_partition_with_bad_partitions():
+    client = boto3.client('glue', region_name='us-east-1')
+    database_name = 'myspecialdatabase'
+    table_name = 'myfirsttable'
+    helpers.create_database(client, database_name)
+    helpers.create_table(client, database_name, table_name)
+
+    partition_inputs = []
+    for i in range(0, 20):
+        values = ["2018-10-{:2}".format(i)]
+        part_input = helpers.create_partition_input(database_name, table_name, values=values)
+        partition_inputs.append(part_input)
+
+    client.batch_create_partition(
+        DatabaseName=database_name,
+        TableName=table_name,
+        PartitionInputList=partition_inputs
+    )
+
+    partition_values = [{"Values": p["Values"]} for p in partition_inputs]
+
+    partition_values.insert(5, {"Values": ["2018-11-01"]})
+    partition_values.insert(10, {"Values": ["2018-11-02"]})
+    partition_values.insert(15, {"Values": ["2018-11-03"]})
+
+    response = client.batch_delete_partition(
+        DatabaseName=database_name,
+        TableName=table_name,
+        PartitionsToDelete=partition_values,
+    )
+
+    response.should.have.key('Errors')
+    response['Errors'].should.have.length_of(3)
+    error_partitions = map(lambda x: x['PartitionValues'], response['Errors'])
+    ['2018-11-01'].should.be.within(error_partitions)
+    ['2018-11-02'].should.be.within(error_partitions)
+    ['2018-11-03'].should.be.within(error_partitions)
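The two batch tests above depend on batch_delete_partition reporting per-partition failures in an Errors list instead of raising, mirroring delete_partition's EntityNotFoundException for partitions that were never created. A sketch of reading that structure (the ErrorDetail shape is assumed from the usual Glue error format and is not asserted by the tests):

    response = client.batch_delete_partition(
        DatabaseName='myspecialdatabase',
        TableName='myfirsttable',
        PartitionsToDelete=[{'Values': ['2018-11-01']}],  # never created
    )
    for error in response.get('Errors', []):
        # each entry names the partition that failed, e.g. ['2018-11-01']
        print(error['PartitionValues'], error.get('ErrorDetail'))
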
@@ -637,13 +637,17 @@ def test_delete_login_profile():
    conn.delete_login_profile('my-user')


-@mock_iam_deprecated()
+@mock_iam()
def test_create_access_key():
-    conn = boto.connect_iam()
-    with assert_raises(BotoServerError):
-        conn.create_access_key('my-user')
-    conn.create_user('my-user')
-    conn.create_access_key('my-user')
+    conn = boto3.client('iam', region_name='us-east-1')
+    with assert_raises(ClientError):
+        conn.create_access_key(UserName='my-user')
+    conn.create_user(UserName='my-user')
+    access_key = conn.create_access_key(UserName='my-user')["AccessKey"]
+    (datetime.utcnow() - access_key["CreateDate"].replace(tzinfo=None)).seconds.should.be.within(0, 10)
+    access_key["AccessKeyId"].should.have.length_of(20)
+    access_key["SecretAccessKey"].should.have.length_of(40)
+    assert access_key["AccessKeyId"].startswith("AKIA")


@mock_iam_deprecated()
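Beyond porting the test from boto to boto3, the rewrite now pins down the shape of the response, which is presumably what the PR title refers to: create_access_key must return an AccessKey mapping with a 20-character AccessKeyId carrying the familiar AKIA prefix, a 40-character SecretAccessKey, and a CreateDate close to the current time. A condensed sketch of that contract (hypothetical function name):

    import boto3
    from moto import mock_iam

    @mock_iam
    def access_key_shape_demo():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_user(UserName='my-user')
        key = iam.create_access_key(UserName='my-user')['AccessKey']
        # Mirrors the assertions in the rewritten test.
        assert key['AccessKeyId'].startswith('AKIA') and len(key['AccessKeyId']) == 20
        assert len(key['SecretAccessKey']) == 40
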
@@ -15,7 +15,7 @@ from moto import mock_kinesis, mock_kinesis_deprecated
def test_create_cluster():
    conn = boto.kinesis.connect_to_region("us-west-2")

-    conn.create_stream("my_stream", 2)
+    conn.create_stream("my_stream", 3)

    stream_response = conn.describe_stream("my_stream")

@@ -27,7 +27,7 @@ def test_create_cluster():
    stream["StreamStatus"].should.equal("ACTIVE")

    shards = stream['Shards']
-    shards.should.have.length_of(2)
+    shards.should.have.length_of(3)


@mock_kinesis_deprecated
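The shard count moves from 2 to 3 in both the create_stream call and the matching assertion, so the test keeps checking the same contract: describe_stream reports exactly as many shards as were requested. In the deprecated boto interface that check reduces to (a sketch):

    conn = boto.kinesis.connect_to_region("us-west-2")
    conn.create_stream("my_stream", 3)  # stream name, shard count
    stream = conn.describe_stream("my_stream")["StreamDescription"]
    assert len(stream["Shards"]) == 3
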
@@ -123,12 +123,12 @@ def test_rrset():
    rrsets.should.have.length_of(2)

    rrsets = conn.get_all_rrsets(
-        zoneid, name="foo.bar.testdns.aws.com", type="A")
+        zoneid, name="bar.foo.testdns.aws.com", type="A")
    rrsets.should.have.length_of(1)
-    rrsets[0].resource_records[0].should.equal('1.2.3.4')
+    rrsets[0].resource_records[0].should.equal('5.6.7.8')

    rrsets = conn.get_all_rrsets(
-        zoneid, name="bar.foo.testdns.aws.com", type="A")
+        zoneid, name="foo.bar.testdns.aws.com", type="A")
    rrsets.should.have.length_of(2)
    resource_records = [rr for rr_set in rrsets for rr in rr_set.resource_records]
    resource_records.should.contain('1.2.3.4')
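The swapped expectations line up with how Route 53's ListResourceRecordSets behaves: the name argument is a starting point in the service's sort order (names are compared with their labels reversed), not an exact-match filter. Here foo.bar.testdns.aws.com sorts first, so listing from it returns both record sets, while listing from bar.foo.testdns.aws.com returns only that one. A sketch against the same boto connection and zone id as the test:

    # Listing starts at the given name and includes everything that sorts after it.
    rrsets = conn.get_all_rrsets(zoneid, name="foo.bar.testdns.aws.com", type="A")
    values = [rr for rr_set in rrsets for rr in rr_set.resource_records]
    assert '1.2.3.4' in values and '5.6.7.8' in values
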

update_version_from_git.py (new file, 118 lines)
@@ -0,0 +1,118 @@
+"""
+Adapted from https://github.com/pygame/pygameweb/blob/master/pygameweb/builds/update_version_from_git.py
+
+For updating the version from git.
+__init__.py contains a __version__ field.
+Update that.
+
+If we are on master, we want to update the version as a pre-release.
+
+git describe --tags
+
+With these:
+    __init__.py
+        __version__= '0.0.2'
+
+    git describe --tags
+        0.0.1-22-g729a5ae
+
+We want this:
+    __init__.py
+        __version__= '0.0.2.dev22.g729a5ae'
+
+Get the branch/tag name with this.
+    git symbolic-ref -q --short HEAD || git describe --tags --exact-match
+"""
+
+import io
+import os
+import re
+import subprocess
+
+
+def migrate_source_attribute(attr, to_this, target_file, regex):
+    """Updates __magic__ attributes in the source file"""
+    change_this = re.compile(regex, re.S)
+    new_file = []
+    found = False
+
+    with open(target_file, 'r') as fp:
+        lines = fp.readlines()
+
+    for line in lines:
+        if line.startswith(attr):
+            found = True
+            line = re.sub(change_this, to_this, line)
+        new_file.append(line)
+
+    if found:
+        with open(target_file, 'w') as fp:
+            fp.writelines(new_file)
+
+
+def migrate_version(target_file, new_version):
+    """Updates __version__ in the source file"""
+    regex = r"['\"](.*)['\"]"
+    migrate_source_attribute('__version__', "'{new_version}'".format(new_version=new_version), target_file, regex)
+
+
+def is_master_branch():
+    cmd = ('git rev-parse --abbrev-ref HEAD')
+    tag_branch = subprocess.check_output(cmd, shell=True)
+    return tag_branch in [b'master\n']
+
+
+def git_tag_name():
+    cmd = ('git describe --tags')
+    tag_branch = subprocess.check_output(cmd, shell=True)
+    tag_branch = tag_branch.decode().strip()
+    return tag_branch
+
+
+def get_git_version_info():
+    cmd = 'git describe --tags'
+    ver_str = subprocess.check_output(cmd, shell=True)
+    ver, commits_since, githash = ver_str.decode().strip().split('-')
+    return ver, commits_since, githash
+
+
+def prerelease_version():
+    """ return what the prerelease version should be.
+    https://packaging.python.org/tutorials/distributing-packages/#pre-release-versioning
+    0.0.2.dev22
+    """
+    ver, commits_since, githash = get_git_version_info()
+    initpy_ver = get_version()
+
+    assert len(initpy_ver.split('.')) in [3, 4], 'moto/__init__.py version should be like 0.0.2 or 0.0.2.dev'
+    assert initpy_ver > ver, 'the moto/__init__.py version should be newer than the last tagged release.'
+    return '{initpy_ver}.dev{commits_since}'.format(initpy_ver=initpy_ver, commits_since=commits_since)
+
+
+def read(*parts):
+    """ Reads in file from *parts.
+    """
+    try:
+        return io.open(os.path.join(*parts), 'r', encoding='utf-8').read()
+    except IOError:
+        return ''
+
+
+def get_version():
+    """ Returns version from moto/__init__.py
+    """
+    version_file = read('moto', '__init__.py')
+    version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]',
+                              version_file, re.MULTILINE)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError('Unable to find version string.')
+
+
+def release_version_correct():
+    """Makes sure the:
+    - prerelease version for master is correct.
+    - release version is correct for tags.
+    """
+    if is_master_branch():
+        # update for a pre release version.
+        initpy = os.path.abspath("moto/__init__.py")
+
+        new_version = prerelease_version()
+        print('updating version in __init__.py to {new_version}'.format(new_version=new_version))
+        migrate_version(initpy, new_version)
+    else:
+        # check that we are a tag with the same version as in __init__.py
+        assert get_version() == git_tag_name(), 'git tag/branch name not the same as moto/__init__.py __version__'
+
+
+if __name__ == '__main__':
+    release_version_correct()
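Taken together, the script's flow on master is: read __version__ from moto/__init__.py, parse git describe --tags, and stitch the two into a PEP 440 pre-release. A worked trace using the docstring's own example values:

    ver, commits_since, githash = '0.0.1', '22', 'g729a5ae'  # 'git describe --tags' -> 0.0.1-22-g729a5ae
    initpy_ver = '0.0.2'                                     # from moto/__init__.py
    # prerelease_version() then returns:
    '{initpy_ver}.dev{commits_since}'.format(initpy_ver=initpy_ver,
                                             commits_since=commits_since)  # -> '0.0.2.dev22'

On a tag build, release_version_correct() instead asserts that the tag name equals __version__, so the version committed to __init__.py is published as-is.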