Merge pull request #3 from spulec/master

Pull latest changes from upstream

This commit is contained in: commit dddc12868a

.gitignore (vendored): 4 changes
@@ -15,4 +15,6 @@ python_env
 .ropeproject/
 .pytest_cache/
 venv/
+.python-version
+.vscode/
+tests/file.tmp
.travis.yml: 17 changes

@@ -1,3 +1,4 @@
+dist: xenial
 language: python
 sudo: false
 services:

@@ -5,26 +6,12 @@ services:
 python:
   - 2.7
   - 3.6
+  - 3.7
 env:
   - TEST_SERVER_MODE=false
   - TEST_SERVER_MODE=true
-# Due to incomplete Python 3.7 support on Travis CI (
-# https://github.com/travis-ci/travis-ci/issues/9815),
-# using a matrix is necessary
-matrix:
-  include:
-    - python: 3.7
-      env: TEST_SERVER_MODE=false
-      dist: xenial
-      sudo: true
-    - python: 3.7
-      env: TEST_SERVER_MODE=true
-      dist: xenial
-      sudo: true
 before_install:
   - export BOTO_CONFIG=/dev/null
-  - export AWS_SECRET_ACCESS_KEY=foobar_secret
-  - export AWS_ACCESS_KEY_ID=foobar_key
 install:
   # We build moto first so the docker container doesn't try to compile it as well, also note we don't use
   # -d for docker run so the logs show up in travis
@@ -54,3 +54,4 @@ Moto is written by Steve Pulec with contributions from:
 * [William Richard](https://github.com/william-richard)
 * [Alex Casalboni](https://github.com/alexcasalboni)
 * [Jon Beilke](https://github.com/jrbeilke)
+* [Robert Lewis](https://github.com/ralewis85)
@@ -470,46 +470,46 @@
 - [ ] upgrade_applied_schema
 - [ ] upgrade_published_schema
 
-## cloudformation - 21% implemented
+## cloudformation - 56% implemented
 - [ ] cancel_update_stack
 - [ ] continue_update_rollback
 - [X] create_change_set
 - [X] create_stack
-- [ ] create_stack_instances
-- [ ] create_stack_set
-- [ ] delete_change_set
+- [X] create_stack_instances
+- [X] create_stack_set
+- [X] delete_change_set
 - [X] delete_stack
-- [ ] delete_stack_instances
-- [ ] delete_stack_set
+- [X] delete_stack_instances
+- [X] delete_stack_set
 - [ ] describe_account_limits
-- [ ] describe_change_set
+- [X] describe_change_set
 - [ ] describe_stack_events
-- [ ] describe_stack_instance
+- [X] describe_stack_instance
 - [ ] describe_stack_resource
 - [ ] describe_stack_resources
-- [ ] describe_stack_set
-- [ ] describe_stack_set_operation
+- [X] describe_stack_set
+- [X] describe_stack_set_operation
 - [X] describe_stacks
 - [ ] estimate_template_cost
 - [X] execute_change_set
 - [ ] get_stack_policy
 - [ ] get_template
 - [ ] get_template_summary
-- [ ] list_change_sets
+- [X] list_change_sets
 - [X] list_exports
 - [ ] list_imports
-- [ ] list_stack_instances
+- [X] list_stack_instances
 - [X] list_stack_resources
-- [ ] list_stack_set_operation_results
-- [ ] list_stack_set_operations
-- [ ] list_stack_sets
+- [X] list_stack_set_operation_results
+- [X] list_stack_set_operations
+- [X] list_stack_sets
 - [X] list_stacks
 - [ ] set_stack_policy
 - [ ] signal_resource
-- [ ] stop_stack_set_operation
+- [X] stop_stack_set_operation
 - [X] update_stack
-- [ ] update_stack_instances
-- [ ] update_stack_set
+- [X] update_stack_instances
+- [X] update_stack_set
 - [ ] update_termination_protection
 - [ ] validate_template
 

@@ -827,25 +827,25 @@
 - [ ] unlink_identity
 - [ ] update_identity_pool
 
-## cognito-idp - 0% implemented
+## cognito-idp - 34% implemented
 - [ ] add_custom_attributes
-- [ ] admin_add_user_to_group
+- [X] admin_add_user_to_group
 - [ ] admin_confirm_sign_up
-- [ ] admin_create_user
-- [ ] admin_delete_user
+- [X] admin_create_user
+- [X] admin_delete_user
 - [ ] admin_delete_user_attributes
 - [ ] admin_disable_provider_for_user
 - [X] admin_disable_user
 - [X] admin_enable_user
 - [ ] admin_forget_device
 - [ ] admin_get_device
-- [ ] admin_get_user
-- [ ] admin_initiate_auth
+- [X] admin_get_user
+- [X] admin_initiate_auth
 - [ ] admin_link_provider_for_user
 - [ ] admin_list_devices
-- [ ] admin_list_groups_for_user
+- [X] admin_list_groups_for_user
 - [ ] admin_list_user_auth_events
-- [ ] admin_remove_user_from_group
+- [X] admin_remove_user_from_group
 - [ ] admin_reset_user_password
 - [ ] admin_respond_to_auth_challenge
 - [ ] admin_set_user_mfa_preference

@@ -855,37 +855,37 @@
 - [ ] admin_update_user_attributes
 - [ ] admin_user_global_sign_out
 - [ ] associate_software_token
-- [ ] change_password
+- [X] change_password
 - [ ] confirm_device
-- [ ] confirm_forgot_password
+- [X] confirm_forgot_password
 - [ ] confirm_sign_up
-- [ ] create_group
-- [ ] create_identity_provider
+- [X] create_group
+- [X] create_identity_provider
 - [ ] create_resource_server
 - [ ] create_user_import_job
-- [ ] create_user_pool
-- [ ] create_user_pool_client
-- [ ] create_user_pool_domain
-- [ ] delete_group
-- [ ] delete_identity_provider
+- [X] create_user_pool
+- [X] create_user_pool_client
+- [X] create_user_pool_domain
+- [X] delete_group
+- [X] delete_identity_provider
 - [ ] delete_resource_server
 - [ ] delete_user
 - [ ] delete_user_attributes
-- [ ] delete_user_pool
-- [ ] delete_user_pool_client
-- [ ] delete_user_pool_domain
-- [ ] describe_identity_provider
+- [X] delete_user_pool
+- [X] delete_user_pool_client
+- [X] delete_user_pool_domain
+- [X] describe_identity_provider
 - [ ] describe_resource_server
 - [ ] describe_risk_configuration
 - [ ] describe_user_import_job
-- [ ] describe_user_pool
-- [ ] describe_user_pool_client
-- [ ] describe_user_pool_domain
+- [X] describe_user_pool
+- [X] describe_user_pool_client
+- [X] describe_user_pool_domain
 - [ ] forget_device
 - [ ] forgot_password
 - [ ] get_csv_header
 - [ ] get_device
-- [ ] get_group
+- [X] get_group
 - [ ] get_identity_provider_by_identifier
 - [ ] get_signing_certificate
 - [ ] get_ui_customization

@@ -895,16 +895,16 @@
 - [ ] global_sign_out
 - [ ] initiate_auth
 - [ ] list_devices
-- [ ] list_groups
-- [ ] list_identity_providers
+- [X] list_groups
+- [X] list_identity_providers
 - [ ] list_resource_servers
 - [ ] list_user_import_jobs
-- [ ] list_user_pool_clients
-- [ ] list_user_pools
-- [ ] list_users
-- [ ] list_users_in_group
+- [X] list_user_pool_clients
+- [X] list_user_pools
+- [X] list_users
+- [X] list_users_in_group
 - [ ] resend_confirmation_code
-- [ ] respond_to_auth_challenge
+- [X] respond_to_auth_challenge
 - [ ] set_risk_configuration
 - [ ] set_ui_customization
 - [ ] set_user_mfa_preference

@@ -916,11 +916,11 @@
 - [ ] update_auth_event_feedback
 - [ ] update_device_status
 - [ ] update_group
-- [ ] update_identity_provider
+- [x] update_identity_provider
 - [ ] update_resource_server
 - [ ] update_user_attributes
 - [ ] update_user_pool
-- [ ] update_user_pool_client
+- [X] update_user_pool_client
 - [ ] verify_software_token
 - [ ] verify_user_attribute
 

@@ -2208,7 +2208,7 @@
 - [ ] describe_event_types
 - [ ] describe_events
 
-## iam - 48% implemented
+## iam - 62% implemented
 - [ ] add_client_id_to_open_id_connect_provider
 - [X] add_role_to_instance_profile
 - [X] add_user_to_group

@@ -2225,7 +2225,7 @@
 - [X] create_policy
 - [X] create_policy_version
 - [X] create_role
-- [ ] create_saml_provider
+- [X] create_saml_provider
 - [ ] create_service_linked_role
 - [ ] create_service_specific_credential
 - [X] create_user

@@ -2243,11 +2243,11 @@
 - [X] delete_policy_version
 - [X] delete_role
 - [X] delete_role_policy
-- [ ] delete_saml_provider
+- [X] delete_saml_provider
 - [X] delete_server_certificate
 - [ ] delete_service_linked_role
 - [ ] delete_service_specific_credential
-- [ ] delete_signing_certificate
+- [X] delete_signing_certificate
 - [ ] delete_ssh_public_key
 - [X] delete_user
 - [X] delete_user_policy

@@ -2257,7 +2257,7 @@
 - [X] detach_user_policy
 - [X] enable_mfa_device
 - [ ] generate_credential_report
-- [ ] get_access_key_last_used
+- [X] get_access_key_last_used
 - [X] get_account_authorization_details
 - [ ] get_account_password_policy
 - [ ] get_account_summary

@@ -2273,13 +2273,13 @@
 - [X] get_policy_version
 - [X] get_role
 - [X] get_role_policy
-- [ ] get_saml_provider
+- [X] get_saml_provider
 - [X] get_server_certificate
 - [ ] get_service_linked_role_deletion_status
 - [ ] get_ssh_public_key
 - [X] get_user
 - [X] get_user_policy
-- [ ] list_access_keys
+- [X] list_access_keys
 - [X] list_account_aliases
 - [X] list_attached_group_policies
 - [X] list_attached_role_policies

@@ -2287,19 +2287,21 @@
 - [ ] list_entities_for_policy
 - [X] list_group_policies
 - [X] list_groups
-- [ ] list_groups_for_user
-- [ ] list_instance_profiles
-- [ ] list_instance_profiles_for_role
+- [X] list_groups_for_user
+- [X] list_instance_profiles
+- [X] list_instance_profiles_for_role
 - [X] list_mfa_devices
 - [ ] list_open_id_connect_providers
 - [X] list_policies
 - [X] list_policy_versions
 - [X] list_role_policies
-- [ ] list_roles
-- [ ] list_saml_providers
-- [ ] list_server_certificates
+- [X] list_roles
+- [X] list_role_tags
+- [ ] list_user_tags
+- [X] list_saml_providers
+- [X] list_server_certificates
 - [ ] list_service_specific_credentials
-- [ ] list_signing_certificates
+- [X] list_signing_certificates
 - [ ] list_ssh_public_keys
 - [X] list_user_policies
 - [X] list_users

@@ -2315,6 +2317,10 @@
 - [ ] set_default_policy_version
 - [ ] simulate_custom_policy
 - [ ] simulate_principal_policy
+- [X] tag_role
+- [ ] tag_user
+- [X] untag_role
+- [ ] untag_user
 - [X] update_access_key
 - [ ] update_account_password_policy
 - [ ] update_assume_role_policy

@@ -2323,14 +2329,14 @@
 - [ ] update_open_id_connect_provider_thumbprint
 - [ ] update_role
 - [ ] update_role_description
-- [ ] update_saml_provider
+- [X] update_saml_provider
 - [ ] update_server_certificate
 - [ ] update_service_specific_credential
-- [ ] update_signing_certificate
+- [X] update_signing_certificate
 - [ ] update_ssh_public_key
-- [ ] update_user
-- [ ] upload_server_certificate
-- [ ] upload_signing_certificate
+- [X] update_user
+- [X] upload_server_certificate
+- [X] upload_signing_certificate
 - [ ] upload_ssh_public_key
 
 ## importexport - 0% implemented

@@ -2376,11 +2382,11 @@
 - [ ] unsubscribe_from_event
 - [ ] update_assessment_target
 
-## iot - 30% implemented
+## iot - 32% implemented
 - [ ] accept_certificate_transfer
 - [X] add_thing_to_thing_group
 - [ ] associate_targets_with_job
-- [ ] attach_policy
+- [X] attach_policy
 - [X] attach_principal_policy
 - [X] attach_thing_principal
 - [ ] cancel_certificate_transfer

@@ -2429,7 +2435,7 @@
 - [X] describe_thing_group
 - [ ] describe_thing_registration_task
 - [X] describe_thing_type
-- [ ] detach_policy
+- [X] detach_policy
 - [X] detach_principal_policy
 - [X] detach_thing_principal
 - [ ] disable_topic_rule

@@ -3542,7 +3548,7 @@
 - [ ] get_bucket_inventory_configuration
 - [ ] get_bucket_lifecycle
 - [ ] get_bucket_lifecycle_configuration
-- [ ] get_bucket_location
+- [X] get_bucket_location
 - [ ] get_bucket_logging
 - [ ] get_bucket_metrics_configuration
 - [ ] get_bucket_notification

@@ -3648,14 +3654,14 @@
 ## secretsmanager - 33% implemented
 - [ ] cancel_rotate_secret
 - [X] create_secret
-- [ ] delete_secret
+- [X] delete_secret
 - [X] describe_secret
 - [X] get_random_password
 - [X] get_secret_value
 - [ ] list_secret_version_ids
-- [ ] list_secrets
+- [x] list_secrets
 - [ ] put_secret_value
-- [ ] restore_secret
+- [X] restore_secret
 - [X] rotate_secret
 - [ ] tag_resource
 - [ ] untag_resource

@@ -3913,7 +3919,7 @@
 - [ ] delete_message_batch
 - [X] delete_queue
 - [ ] get_queue_attributes
-- [ ] get_queue_url
+- [X] get_queue_url
 - [X] list_dead_letter_source_queues
 - [ ] list_queue_tags
 - [X] list_queues
Makefile: 1 change

@@ -19,6 +19,7 @@ test: lint
 	rm -f .coverage
 	rm -rf cover
 	@nosetests -sv --with-coverage --cover-html ./tests/ $(TEST_EXCLUDE)
 
 test_server:
 	@TEST_SERVER_MODE=true nosetests -sv --with-coverage --cover-html ./tests/
@@ -2,8 +2,8 @@
 
 [![Join the chat at https://gitter.im/awsmoto/Lobby](https://badges.gitter.im/awsmoto/Lobby.svg)](https://gitter.im/awsmoto/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
-[![Build Status](https://travis-ci.org/spulec/moto.png?branch=master)](https://travis-ci.org/spulec/moto)
-[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.png?branch=master)](https://coveralls.io/r/spulec/moto)
+[![Build Status](https://travis-ci.org/spulec/moto.svg?branch=master)](https://travis-ci.org/spulec/moto)
+[![Coverage Status](https://coveralls.io/repos/spulec/moto/badge.svg?branch=master)](https://coveralls.io/r/spulec/moto)
 [![Docs](https://readthedocs.org/projects/pip/badge/?version=stable)](http://docs.getmoto.org)
 
 # In a nutshell

@@ -74,6 +74,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
 |------------------------------------------------------------------------------|
 | Cognito Identity Provider | @mock_cognitoidp| basic endpoints done |
 |------------------------------------------------------------------------------|
+| Config | @mock_config | basic endpoints done |
+|------------------------------------------------------------------------------|
 | Data Pipeline | @mock_datapipeline| basic endpoints done |
 |------------------------------------------------------------------------------|
 | DynamoDB | @mock_dynamodb | core endpoints done |

@@ -259,7 +261,7 @@ It uses flask, which isn't a default dependency. You can install the
 server 'extra' package with:
 
 ```python
-pip install moto[server]
+pip install "moto[server]"
 ```
 
 You can then start it running a service:
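The quoting added above matters because shells such as zsh treat unquoted square brackets as glob patterns, so the bare `pip install moto[server]` can fail with "no matches found". As a hedged illustration of what server mode enables (not part of this commit; the port assumes the server was started with something like `moto_server ec2 -p 5000`), any boto3 client can be pointed at the local moto server through its endpoint URL:

```python
# Minimal sketch: talk to a locally running moto server instead of AWS.
import boto3

client = boto3.client(
    "ec2",
    region_name="us-east-1",
    endpoint_url="http://localhost:5000",  # the moto server, not AWS
    aws_access_key_id="testing",           # dummy credentials are fine
    aws_secret_access_key="testing",
)
print(client.describe_instances()["Reservations"])
```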
@@ -3,7 +3,7 @@ import logging
 # logging.getLogger('boto').setLevel(logging.CRITICAL)
 
 __title__ = 'moto'
-__version__ = '1.3.7'
+__version__ = '1.3.8'
 
 from .acm import mock_acm  # flake8: noqa
 from .apigateway import mock_apigateway, mock_apigateway_deprecated  # flake8: noqa

@@ -13,9 +13,11 @@ from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated
 from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated  # flake8: noqa
 from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated  # flake8: noqa
 from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated  # flake8: noqa
+from .config import mock_config  # flake8: noqa
 from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated  # flake8: noqa
 from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated  # flake8: noqa
 from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated  # flake8: noqa
+from .dynamodbstreams import mock_dynamodbstreams  # flake8: noqa
 from .ec2 import mock_ec2, mock_ec2_deprecated  # flake8: noqa
 from .ecr import mock_ecr, mock_ecr_deprecated  # flake8: noqa
 from .ecs import mock_ecs, mock_ecs_deprecated  # flake8: noqa
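The two new imports expose `mock_config` and `mock_dynamodbstreams` as top-level decorators alongside the existing mocks. A minimal sketch of the new `mock_config` decorator in use, assuming the "basic endpoints done" noted in the README row include describing configuration recorders (the test name is illustrative):

```python
# Hedged sketch: exercising the newly exported mock_config decorator.
import boto3
from moto import mock_config


@mock_config
def test_config_recorders_start_empty():
    client = boto3.client("config", region_name="us-east-1")
    # A fresh mocked account should have no configuration recorders.
    recorders = client.describe_configuration_recorders()["ConfigurationRecorders"]
    assert recorders == []


test_config_recorders_start_empty()
```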
@@ -243,7 +243,7 @@ class CertBundle(BaseModel):
             'KeyAlgorithm': key_algo,
             'NotAfter': datetime_to_epoch(self._cert.not_valid_after),
             'NotBefore': datetime_to_epoch(self._cert.not_valid_before),
-            'Serial': self._cert.serial,
+            'Serial': self._cert.serial_number,
             'SignatureAlgorithm': self._cert.signature_algorithm_oid._name.upper().replace('ENCRYPTION', ''),
             'Status': self.status,  # One of PENDING_VALIDATION, ISSUED, INACTIVE, EXPIRED, VALIDATION_TIMED_OUT, REVOKED, FAILED.
             'Subject': 'CN={0}'.format(self.common_name),
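This one-line change (in what appears to be the ACM certificate model, judging by the `CertBundle` context) tracks an upstream rename: the `cryptography` package deprecated `Certificate.serial` in favour of `Certificate.serial_number`. A self-contained sketch of the non-deprecated accessor, using a throwaway self-signed certificate:

```python
# Sketch (not from the diff): build a self-signed cert and read its
# serial via the serial_number property the fix above switches to.
import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend())
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.datetime.utcnow())
    .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=1))
    .sign(key, hashes.SHA256(), default_backend())
)
print(cert.serial_number)  # the accessor used by the patched code
```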
@@ -17,10 +17,12 @@ ASG_NAME_TAG = "aws:autoscaling:groupName"
 
 
 class InstanceState(object):
-    def __init__(self, instance, lifecycle_state="InService", health_status="Healthy"):
+    def __init__(self, instance, lifecycle_state="InService",
+                 health_status="Healthy", protected_from_scale_in=False):
         self.instance = instance
         self.lifecycle_state = lifecycle_state
         self.health_status = health_status
+        self.protected_from_scale_in = protected_from_scale_in
 
 
 class FakeScalingPolicy(BaseModel):

@@ -152,7 +154,8 @@ class FakeAutoScalingGroup(BaseModel):
                  min_size, launch_config_name, vpc_zone_identifier,
                  default_cooldown, health_check_period, health_check_type,
                  load_balancers, target_group_arns, placement_group, termination_policies,
-                 autoscaling_backend, tags):
+                 autoscaling_backend, tags,
+                 new_instances_protected_from_scale_in=False):
         self.autoscaling_backend = autoscaling_backend
         self.name = name
 

@@ -178,6 +181,7 @@ class FakeAutoScalingGroup(BaseModel):
         self.target_group_arns = target_group_arns
         self.placement_group = placement_group
         self.termination_policies = termination_policies
+        self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in
 
         self.suspended_processes = []
         self.instance_states = []

@@ -210,6 +214,8 @@ class FakeAutoScalingGroup(BaseModel):
             placement_group=None,
             termination_policies=properties.get("TerminationPolicies", []),
             tags=properties.get("Tags", []),
+            new_instances_protected_from_scale_in=properties.get(
+                "NewInstancesProtectedFromScaleIn", False)
         )
         return group
 

@@ -238,7 +244,8 @@ class FakeAutoScalingGroup(BaseModel):
     def update(self, availability_zones, desired_capacity, max_size, min_size,
                launch_config_name, vpc_zone_identifier, default_cooldown,
                health_check_period, health_check_type,
-               placement_group, termination_policies):
+               placement_group, termination_policies,
+               new_instances_protected_from_scale_in=None):
         if availability_zones:
             self.availability_zones = availability_zones
         if max_size is not None:

@@ -256,6 +263,8 @@ class FakeAutoScalingGroup(BaseModel):
             self.health_check_period = health_check_period
         if health_check_type is not None:
             self.health_check_type = health_check_type
+        if new_instances_protected_from_scale_in is not None:
+            self.new_instances_protected_from_scale_in = new_instances_protected_from_scale_in
 
         if desired_capacity is not None:
             self.set_desired_capacity(desired_capacity)

@@ -280,12 +289,16 @@ class FakeAutoScalingGroup(BaseModel):
         else:
             # Need to remove some instances
             count_to_remove = curr_instance_count - self.desired_capacity
-            instances_to_remove = self.instance_states[:count_to_remove]
+            instances_to_remove = [  # only remove unprotected
+                state for state in self.instance_states
+                if not state.protected_from_scale_in
+            ][:count_to_remove]
+            if instances_to_remove:  # just in case not instances to remove
                 instance_ids_to_remove = [
                     instance.instance.id for instance in instances_to_remove]
                 self.autoscaling_backend.ec2_backend.terminate_instances(
                     instance_ids_to_remove)
-            self.instance_states = self.instance_states[count_to_remove:]
+                self.instance_states = list(set(self.instance_states) - set(instances_to_remove))
 
     def get_propagated_tags(self):
         propagated_tags = {}

@@ -310,7 +323,10 @@ class FakeAutoScalingGroup(BaseModel):
         )
         for instance in reservation.instances:
             instance.autoscaling_group = self
-            self.instance_states.append(InstanceState(instance))
+            self.instance_states.append(InstanceState(
+                instance,
+                protected_from_scale_in=self.new_instances_protected_from_scale_in,
+            ))
 
     def append_target_groups(self, target_group_arns):
         append = [x for x in target_group_arns if x not in self.target_group_arns]

@@ -372,7 +388,8 @@ class AutoScalingBackend(BaseBackend):
                                   default_cooldown, health_check_period,
                                   health_check_type, load_balancers,
                                   target_group_arns, placement_group,
-                                  termination_policies, tags):
+                                  termination_policies, tags,
+                                  new_instances_protected_from_scale_in=False):
 
         def make_int(value):
             return int(value) if value is not None else value

@@ -403,6 +420,7 @@ class AutoScalingBackend(BaseBackend):
             termination_policies=termination_policies,
             autoscaling_backend=self,
             tags=tags,
+            new_instances_protected_from_scale_in=new_instances_protected_from_scale_in,
         )
 
         self.autoscaling_groups[name] = group

@@ -415,12 +433,14 @@ class AutoScalingBackend(BaseBackend):
                                   launch_config_name, vpc_zone_identifier,
                                   default_cooldown, health_check_period,
                                   health_check_type, placement_group,
-                                  termination_policies):
+                                  termination_policies,
+                                  new_instances_protected_from_scale_in=None):
         group = self.autoscaling_groups[name]
         group.update(availability_zones, desired_capacity, max_size,
                      min_size, launch_config_name, vpc_zone_identifier,
                      default_cooldown, health_check_period, health_check_type,
-                     placement_group, termination_policies)
+                     placement_group, termination_policies,
+                     new_instances_protected_from_scale_in=new_instances_protected_from_scale_in)
         return group
 
     def describe_auto_scaling_groups(self, names):

@@ -448,7 +468,13 @@ class AutoScalingBackend(BaseBackend):
             raise ResourceContentionError
         else:
             group.desired_capacity = original_size + len(instance_ids)
-            new_instances = [InstanceState(self.ec2_backend.get_instance(x)) for x in instance_ids]
+            new_instances = [
+                InstanceState(
+                    self.ec2_backend.get_instance(x),
+                    protected_from_scale_in=group.new_instances_protected_from_scale_in,
+                )
+                for x in instance_ids
+            ]
             for instance in new_instances:
                 self.ec2_backend.create_tags([instance.instance.id], {ASG_NAME_TAG: group.name})
             group.instance_states.extend(new_instances)

@@ -626,6 +652,13 @@ class AutoScalingBackend(BaseBackend):
         group = self.autoscaling_groups[group_name]
         group.suspended_processes = scaling_processes or []
 
+    def set_instance_protection(self, group_name, instance_ids, protected_from_scale_in):
+        group = self.autoscaling_groups[group_name]
+        protected_instances = [
+            x for x in group.instance_states if x.instance.id in instance_ids]
+        for instance in protected_instances:
+            instance.protected_from_scale_in = protected_from_scale_in
+
 
 autoscaling_backends = {}
 for region, ec2_backend in ec2_backends.items():
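Taken together, these hunks implement scale-in protection: new instances inherit the group's `NewInstancesProtectedFromScaleIn` flag, protected instances are filtered out of the removal candidates when desired capacity drops, and `set_instance_protection` toggles the flag per instance. A hedged end-to-end sketch against the mocked backend (the resource names and AMI id are illustrative, not from the diff):

```python
# Hedged usage sketch mirroring the new backend behaviour.
import boto3
from moto import mock_autoscaling


@mock_autoscaling
def demo_scale_in_protection():
    client = boto3.client("autoscaling", region_name="us-east-1")
    client.create_launch_configuration(
        LaunchConfigurationName="tester-lc",
        ImageId="ami-12345678",  # any id works against the mock
        InstanceType="t2.micro",
    )
    client.create_auto_scaling_group(
        AutoScalingGroupName="tester-asg",
        LaunchConfigurationName="tester-lc",
        MinSize=0, MaxSize=5, DesiredCapacity=3,
        AvailabilityZones=["us-east-1a"],
        NewInstancesProtectedFromScaleIn=True,  # new instances start protected
    )
    group = client.describe_auto_scaling_groups(
        AutoScalingGroupNames=["tester-asg"])["AutoScalingGroups"][0]
    instance_ids = [i["InstanceId"] for i in group["Instances"]]

    # Unprotect one instance, then scale in: only the unprotected
    # instance is eligible for termination.
    client.set_instance_protection(
        AutoScalingGroupName="tester-asg",
        InstanceIds=instance_ids[:1],
        ProtectedFromScaleIn=False,
    )
    client.set_desired_capacity(
        AutoScalingGroupName="tester-asg", DesiredCapacity=2)


demo_scale_in_protection()
```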
@@ -85,6 +85,8 @@ class AutoScalingResponse(BaseResponse):
             termination_policies=self._get_multi_param(
                 'TerminationPolicies.member'),
             tags=self._get_list_prefix('Tags.member'),
+            new_instances_protected_from_scale_in=self._get_bool_param(
+                'NewInstancesProtectedFromScaleIn', False)
         )
         template = self.response_template(CREATE_AUTOSCALING_GROUP_TEMPLATE)
         return template.render()

@@ -192,6 +194,8 @@ class AutoScalingResponse(BaseResponse):
             placement_group=self._get_param('PlacementGroup'),
             termination_policies=self._get_multi_param(
                 'TerminationPolicies.member'),
+            new_instances_protected_from_scale_in=self._get_bool_param(
+                'NewInstancesProtectedFromScaleIn', None)
         )
         template = self.response_template(UPDATE_AUTOSCALING_GROUP_TEMPLATE)
         return template.render()

@@ -290,6 +294,15 @@ class AutoScalingResponse(BaseResponse):
         template = self.response_template(SUSPEND_PROCESSES_TEMPLATE)
         return template.render()
 
+    def set_instance_protection(self):
+        group_name = self._get_param('AutoScalingGroupName')
+        instance_ids = self._get_multi_param('InstanceIds.member')
+        protected_from_scale_in = self._get_bool_param('ProtectedFromScaleIn')
+        self.autoscaling_backend.set_instance_protection(
+            group_name, instance_ids, protected_from_scale_in)
+        template = self.response_template(SET_INSTANCE_PROTECTION_TEMPLATE)
+        return template.render()
+
 
 CREATE_LAUNCH_CONFIGURATION_TEMPLATE = """<CreateLaunchConfigurationResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
 <ResponseMetadata>

@@ -391,7 +404,7 @@ ATTACH_LOAD_BALANCER_TARGET_GROUPS_TEMPLATE = """<AttachLoadBalancerTargetGroups
 <AttachLoadBalancerTargetGroupsResult>
 </AttachLoadBalancerTargetGroupsResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </AttachLoadBalancerTargetGroupsResponse>"""
 

@@ -399,7 +412,7 @@ ATTACH_INSTANCES_TEMPLATE = """<AttachInstancesResponse xmlns="http://autoscalin
 <AttachInstancesResult>
 </AttachInstancesResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </AttachInstancesResponse>"""
 

@@ -415,7 +428,7 @@ DESCRIBE_LOAD_BALANCER_TARGET_GROUPS = """<DescribeLoadBalancerTargetGroupsRespo
 </LoadBalancerTargetGroups>
 </DescribeLoadBalancerTargetGroupsResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </DescribeLoadBalancerTargetGroupsResponse>"""
 

@@ -441,7 +454,7 @@ DETACH_INSTANCES_TEMPLATE = """<DetachInstancesResponse xmlns="http://autoscalin
 </Activities>
 </DetachInstancesResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </DetachInstancesResponse>"""
 

@@ -449,7 +462,7 @@ DETACH_LOAD_BALANCER_TARGET_GROUPS_TEMPLATE = """<DetachLoadBalancerTargetGroups
 <DetachLoadBalancerTargetGroupsResult>
 </DetachLoadBalancerTargetGroupsResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </DetachLoadBalancerTargetGroupsResponse>"""
 

@@ -490,6 +503,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
 <InstanceId>{{ instance_state.instance.id }}</InstanceId>
 <LaunchConfigurationName>{{ group.launch_config_name }}</LaunchConfigurationName>
 <LifecycleState>{{ instance_state.lifecycle_state }}</LifecycleState>
+<ProtectedFromScaleIn>{{ instance_state.protected_from_scale_in|string|lower }}</ProtectedFromScaleIn>
 </member>
 {% endfor %}
 </Instances>

@@ -508,6 +522,15 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
 {% else %}
 <LoadBalancerNames/>
 {% endif %}
+{% if group.target_group_arns %}
+<TargetGroupARNs>
+{% for target_group_arn in group.target_group_arns %}
+<member>{{ target_group_arn }}</member>
+{% endfor %}
+</TargetGroupARNs>
+{% else %}
+<TargetGroupARNs/>
+{% endif %}
 <MinSize>{{ group.min_size }}</MinSize>
 {% if group.vpc_zone_identifier %}
 <VPCZoneIdentifier>{{ group.vpc_zone_identifier }}</VPCZoneIdentifier>

@@ -530,6 +553,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
 {% if group.placement_group %}
 <PlacementGroup>{{ group.placement_group }}</PlacementGroup>
 {% endif %}
+<NewInstancesProtectedFromScaleIn>{{ group.new_instances_protected_from_scale_in|string|lower }}</NewInstancesProtectedFromScaleIn>
 </member>
 {% endfor %}
 </AutoScalingGroups>

@@ -565,6 +589,7 @@ DESCRIBE_AUTOSCALING_INSTANCES_TEMPLATE = """<DescribeAutoScalingInstancesRespon
 <InstanceId>{{ instance_state.instance.id }}</InstanceId>
 <LaunchConfigurationName>{{ instance_state.instance.autoscaling_group.launch_config_name }}</LaunchConfigurationName>
 <LifecycleState>{{ instance_state.lifecycle_state }}</LifecycleState>
+<ProtectedFromScaleIn>{{ instance_state.protected_from_scale_in|string|lower }}</ProtectedFromScaleIn>
 </member>
 {% endfor %}
 </AutoScalingInstances>

@@ -629,7 +654,7 @@ DELETE_POLICY_TEMPLATE = """<DeleteScalingPolicyResponse xmlns="http://autoscali
 ATTACH_LOAD_BALANCERS_TEMPLATE = """<AttachLoadBalancersResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
 <AttachLoadBalancersResult></AttachLoadBalancersResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </AttachLoadBalancersResponse>"""
 

@@ -645,14 +670,14 @@ DESCRIBE_LOAD_BALANCERS_TEMPLATE = """<DescribeLoadBalancersResponse xmlns="http
 </LoadBalancers>
 </DescribeLoadBalancersResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </DescribeLoadBalancersResponse>"""
 
 DETACH_LOAD_BALANCERS_TEMPLATE = """<DetachLoadBalancersResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
 <DetachLoadBalancersResult></DetachLoadBalancersResult>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </DetachLoadBalancersResponse>"""
 

@@ -665,6 +690,13 @@ SUSPEND_PROCESSES_TEMPLATE = """<SuspendProcessesResponse xmlns="http://autoscal
 SET_INSTANCE_HEALTH_TEMPLATE = """<SetInstanceHealthResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
 <SetInstanceHealthResponse></SetInstanceHealthResponse>
 <ResponseMetadata>
-<RequestId>{{ requestid }}</RequestId>
+<RequestId></RequestId>
 </ResponseMetadata>
 </SetInstanceHealthResponse>"""
 
+SET_INSTANCE_PROTECTION_TEMPLATE = """<SetInstanceProtectionResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
+<SetInstanceProtectionResult></SetInstanceProtectionResult>
+<ResponseMetadata>
+<RequestId></RequestId>
+</ResponseMetadata>
+</SetInstanceProtectionResponse>"""
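On the response side, the templates render the new boolean fields through Jinja's `|string|lower` filter chain because Python's `str(True)` is `"True"`, while boto's XML parsers expect lowercase `true`/`false`. A tiny sketch (not from the diff) of that rendering:

```python
# Why the templates use `|string|lower` for booleans.
from jinja2 import Template

tmpl = Template(
    "<ProtectedFromScaleIn>"
    "{{ protected|string|lower }}"
    "</ProtectedFromScaleIn>"
)
print(tmpl.render(protected=True))   # <ProtectedFromScaleIn>true</ProtectedFromScaleIn>
print(tmpl.render(protected=False))  # <ProtectedFromScaleIn>false</ProtectedFromScaleIn>
```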
@@ -386,7 +386,7 @@ class LambdaFunction(BaseModel):
             'Role': properties['Role'],
             'Runtime': properties['Runtime'],
         }
-        optional_properties = 'Description MemorySize Publish Timeout VpcConfig'.split()
+        optional_properties = 'Description MemorySize Publish Timeout VpcConfig Environment'.split()
         # NOTE: Not doing `properties.get(k, DEFAULT)` to avoid duplicating the
         # default logic
         for prop in optional_properties:

@@ -500,6 +500,11 @@ class LambdaStorage(object):
         except ValueError:
             return self._functions[name]['latest']
 
+    def list_versions_by_function(self, name):
+        if name not in self._functions:
+            return None
+        return [self._functions[name]['latest']]
+
     def get_arn(self, arn):
         return self._arns.get(arn, None)
 

@@ -607,6 +612,9 @@ class LambdaBackend(BaseBackend):
     def get_function(self, function_name, qualifier=None):
         return self._lambdas.get_function(function_name, qualifier)
 
+    def list_versions_by_function(self, function_name):
+        return self._lambdas.list_versions_by_function(function_name)
+
     def get_function_by_arn(self, function_arn):
         return self._lambdas.get_arn(function_arn)
 
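Note that the storage layer's `list_versions_by_function` returns only the latest entry, so this is a deliberately minimal first cut rather than full version tracking. A hedged sketch of the call it unlocks (the zip helper and names are illustrative; moto expects the function code to be a readable zip):

```python
# Hedged sketch: listing versions of a mocked Lambda function.
import io
import zipfile

import boto3
from moto import mock_lambda


def make_zip():
    # Illustrative helper producing a syntactically valid zip in memory.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("lambda_function.py",
                    "def handler(event, context):\n    return event\n")
    return buf.getvalue()


@mock_lambda
def demo_list_versions():
    client = boto3.client("lambda", region_name="us-west-2")
    client.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role="arn:aws:iam::123456789012:role/test-role",  # illustrative ARN
        Handler="lambda_function.handler",
        Code={"ZipFile": make_zip()},
    )
    versions = client.list_versions_by_function(
        FunctionName="testFunction")["Versions"]
    # The backing store only tracks the latest version at this point.
    print([v["FunctionArn"] for v in versions])


demo_list_versions()
```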
@@ -52,7 +52,11 @@ class LambdaResponse(BaseResponse):
         self.setup_class(request, full_url, headers)
         if request.method == 'GET':
             # This is ListVersionByFunction
-            raise ValueError("Cannot handle request")
+            path = request.path if hasattr(request, 'path') else path_url(request.url)
+            function_name = path.split('/')[-2]
+            return self._list_versions_by_function(function_name)
+
         elif request.method == 'POST':
             return self._publish_function(request, full_url, headers)
         else:

@@ -151,6 +155,19 @@ class LambdaResponse(BaseResponse):
 
         return 200, {}, json.dumps(result)
 
+    def _list_versions_by_function(self, function_name):
+        result = {
+            'Versions': []
+        }
+
+        functions = self.lambda_backend.list_versions_by_function(function_name)
+        if functions:
+            for fn in functions:
+                json_data = fn.get_configuration()
+                result['Versions'].append(json_data)
+
+        return 200, {}, json.dumps(result)
+
     def _create_function(self, request, full_url, headers):
         try:
             fn = self.lambda_backend.create_function(self.json_body)

@@ -166,7 +183,7 @@ class LambdaResponse(BaseResponse):
         fn = self.lambda_backend.publish_function(function_name)
         if fn:
             config = fn.get_configuration()
-            return 200, {}, json.dumps(config)
+            return 201, {}, json.dumps(config)
         else:
             return 404, {}, "{}"
 
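The `path.split('/')[-2]` above works because a ListVersionsByFunction request hits `GET /2015-03-31/functions/{FunctionName}/versions`, so the function name is the second-to-last path segment. A one-liner sketch (path value illustrative):

```python
# How the function name is recovered from the request path.
path = "/2015-03-31/functions/my-function/versions"
function_name = path.split("/")[-2]
print(function_name)  # my-function
```

The `200` to `201` change in `_publish_function` matches the real PublishVersion API, which returns HTTP 201 Created.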
@@ -12,6 +12,7 @@ from moto.core import moto_api_backends
 from moto.datapipeline import datapipeline_backends
 from moto.dynamodb import dynamodb_backends
 from moto.dynamodb2 import dynamodb_backends2
+from moto.dynamodbstreams import dynamodbstreams_backends
 from moto.ec2 import ec2_backends
 from moto.ecr import ecr_backends
 from moto.ecs import ecs_backends

@@ -45,7 +46,7 @@ from moto.iot import iot_backends
 from moto.iotdata import iotdata_backends
 from moto.batch import batch_backends
 from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
+from moto.config import config_backends
 
 BACKENDS = {
     'acm': acm_backends,

@@ -56,9 +57,11 @@ BACKENDS = {
     'cloudwatch': cloudwatch_backends,
     'cognito-identity': cognitoidentity_backends,
     'cognito-idp': cognitoidp_backends,
+    'config': config_backends,
     'datapipeline': datapipeline_backends,
     'dynamodb': dynamodb_backends,
     'dynamodb2': dynamodb_backends2,
+    'dynamodbstreams': dynamodbstreams_backends,
     'ec2': ec2_backends,
     'ecr': ecr_backends,
     'ecs': ecs_backends,
@@ -27,7 +27,7 @@ class BatchResponse(BaseResponse):
         elif not hasattr(self, '_json'):
             try:
                 self._json = json.loads(self.body)
-            except json.JSONDecodeError:
+            except ValueError:
                 print()
         return self._json
 
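The exception swap is a Python 2 compatibility fix: `json.JSONDecodeError` only exists on Python 3.5+, and since it subclasses `ValueError`, catching `ValueError` behaves identically on both interpreters. A sketch (not from the diff):

```python
# Catching ValueError covers json.JSONDecodeError on Python 3
# and is the only option on Python 2.7.
import json

def safe_loads(body):
    try:
        return json.loads(body)
    except ValueError:  # also catches json.JSONDecodeError on Python 3
        return None

print(safe_loads('{"ok": 1}'))  # {'ok': 1}
print(safe_loads("not json"))   # None
```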
@@ -1,5 +1,5 @@
 from __future__ import unicode_literals
-from datetime import datetime
+from datetime import datetime, timedelta
 import json
 import yaml
 import uuid

@@ -12,11 +12,156 @@ from .parsing import ResourceMap, OutputMap
 from .utils import (
     generate_changeset_id,
     generate_stack_id,
+    generate_stackset_arn,
+    generate_stackset_id,
     yaml_tag_constructor,
+    validate_template_cfn_lint,
 )
 from .exceptions import ValidationError
 
 
+class FakeStackSet(BaseModel):
+
+    def __init__(self, stackset_id, name, template, region='us-east-1',
+                 status='ACTIVE', description=None, parameters=None, tags=None,
+                 admin_role='AWSCloudFormationStackSetAdministrationRole',
+                 execution_role='AWSCloudFormationStackSetExecutionRole'):
+        self.id = stackset_id
+        self.arn = generate_stackset_arn(stackset_id, region)
+        self.name = name
+        self.template = template
+        self.description = description
+        self.parameters = parameters
+        self.tags = tags
+        self.admin_role = admin_role
+        self.execution_role = execution_role
+        self.status = status
+        self.instances = FakeStackInstances(parameters, self.id, self.name)
+        self.stack_instances = self.instances.stack_instances
+        self.operations = []
+
+    def _create_operation(self, operation_id, action, status, accounts=[], regions=[]):
+        operation = {
+            'OperationId': str(operation_id),
+            'Action': action,
+            'Status': status,
+            'CreationTimestamp': datetime.now(),
+            'EndTimestamp': datetime.now() + timedelta(minutes=2),
+            'Instances': [{account: region} for account in accounts for region in regions],
+        }
+
+        self.operations += [operation]
+        return operation
+
+    def get_operation(self, operation_id):
+        for operation in self.operations:
+            if operation_id == operation['OperationId']:
+                return operation
+        raise ValidationError(operation_id)
+
+    def update_operation(self, operation_id, status):
+        operation = self.get_operation(operation_id)
+        operation['Status'] = status
+        return operation_id
+
+    def delete(self):
+        self.status = 'DELETED'
+
+    def update(self, template, description, parameters, tags, admin_role,
+               execution_role, accounts, regions, operation_id=None):
+        if not operation_id:
+            operation_id = uuid.uuid4()
+
+        self.template = template if template else self.template
+        self.description = description if description is not None else self.description
+        self.parameters = parameters if parameters else self.parameters
+        self.tags = tags if tags else self.tags
+        self.admin_role = admin_role if admin_role else self.admin_role
+        self.execution_role = execution_role if execution_role else self.execution_role
+
+        if accounts and regions:
+            self.update_instances(accounts, regions, self.parameters)
+
+        operation = self._create_operation(operation_id=operation_id,
+                                           action='UPDATE', status='SUCCEEDED', accounts=accounts,
+                                           regions=regions)
+        return operation
+
+    def create_stack_instances(self, accounts, regions, parameters, operation_id=None):
+        if not operation_id:
+            operation_id = uuid.uuid4()
+        if not parameters:
+            parameters = self.parameters
+
+        self.instances.create_instances(accounts, regions, parameters, operation_id)
+        self._create_operation(operation_id=operation_id, action='CREATE',
+                               status='SUCCEEDED', accounts=accounts, regions=regions)
+
+    def delete_stack_instances(self, accounts, regions, operation_id=None):
+        if not operation_id:
+            operation_id = uuid.uuid4()
+
+        self.instances.delete(accounts, regions)
+
+        operation = self._create_operation(operation_id=operation_id, action='DELETE',
+                                           status='SUCCEEDED', accounts=accounts, regions=regions)
+        return operation
+
+    def update_instances(self, accounts, regions, parameters, operation_id=None):
+        if not operation_id:
+            operation_id = uuid.uuid4()
+
+        self.instances.update(accounts, regions, parameters)
+        operation = self._create_operation(operation_id=operation_id,
+                                           action='UPDATE', status='SUCCEEDED', accounts=accounts,
+                                           regions=regions)
+        return operation
+
+
+class FakeStackInstances(BaseModel):
+    def __init__(self, parameters, stackset_id, stackset_name):
+        self.parameters = parameters if parameters else {}
+        self.stackset_id = stackset_id
+        self.stack_name = "StackSet-{}".format(stackset_id)
+        self.stackset_name = stackset_name
|
||||||
|
self.stack_instances = []
|
||||||
|
|
||||||
|
def create_instances(self, accounts, regions, parameters, operation_id):
|
||||||
|
new_instances = []
|
||||||
|
for region in regions:
|
||||||
|
for account in accounts:
|
||||||
|
instance = {
|
||||||
|
'StackId': generate_stack_id(self.stack_name, region, account),
|
||||||
|
'StackSetId': self.stackset_id,
|
||||||
|
'Region': region,
|
||||||
|
'Account': account,
|
||||||
|
'Status': "CURRENT",
|
||||||
|
'ParameterOverrides': parameters if parameters else [],
|
||||||
|
}
|
||||||
|
new_instances.append(instance)
|
||||||
|
self.stack_instances += new_instances
|
||||||
|
return new_instances
|
||||||
|
|
||||||
|
def update(self, accounts, regions, parameters):
|
||||||
|
for account in accounts:
|
||||||
|
for region in regions:
|
||||||
|
instance = self.get_instance(account, region)
|
||||||
|
if parameters:
|
||||||
|
instance['ParameterOverrides'] = parameters
|
||||||
|
else:
|
||||||
|
instance['ParameterOverrides'] = []
|
||||||
|
|
||||||
|
def delete(self, accounts, regions):
|
||||||
|
for i, instance in enumerate(self.stack_instances):
|
||||||
|
if instance['Region'] in regions and instance['Account'] in accounts:
|
||||||
|
self.stack_instances.pop(i)
|
||||||
|
|
||||||
|
def get_instance(self, account, region):
|
||||||
|
for i, instance in enumerate(self.stack_instances):
|
||||||
|
if instance['Region'] == region and instance['Account'] == account:
|
||||||
|
return self.stack_instances[i]
|
||||||
|
|
||||||
|
|
||||||
class FakeStack(BaseModel):
|
class FakeStack(BaseModel):
|
||||||
|
|
||||||
def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None, create_change_set=False):
|
def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None, create_change_set=False):
|
||||||
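A quick sketch of how the two new models cooperate, outside of any HTTP plumbing (a minimal illustration; the literal values are made up):

    from moto.cloudformation.models import FakeStackSet

    stackset = FakeStackSet(stackset_id='demo:1234', name='demo',
                            template='{"Resources": {}}')
    stackset.create_stack_instances(accounts=['123456789012'],
                                    regions=['us-east-1', 'us-west-2'],
                                    parameters=None)

    # One instance per (account, region) pair, plus a recorded operation.
    assert len(stackset.stack_instances) == 2
    assert stackset.operations[0]['Action'] == 'CREATE'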
@@ -84,9 +229,9 @@ class FakeStack(BaseModel):
     def _parse_template(self):
         yaml.add_multi_constructor('', yaml_tag_constructor)
         try:
-            self.template_dict = yaml.load(self.template)
+            self.template_dict = yaml.load(self.template, Loader=yaml.Loader)
         except yaml.parser.ParserError:
-            self.template_dict = json.loads(self.template)
+            self.template_dict = json.loads(self.template, Loader=yaml.Loader)
 
     @property
     def stack_parameters(self):
@@ -126,6 +271,49 @@ class FakeStack(BaseModel):
         self.status = "DELETE_COMPLETE"
 
 
+class FakeChange(BaseModel):
+
+    def __init__(self, action, logical_resource_id, resource_type):
+        self.action = action
+        self.logical_resource_id = logical_resource_id
+        self.resource_type = resource_type
+
+
+class FakeChangeSet(FakeStack):
+
+    def __init__(self, stack_id, stack_name, stack_template, change_set_id, change_set_name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, cross_stack_resources=None):
+        super(FakeChangeSet, self).__init__(
+            stack_id,
+            stack_name,
+            stack_template,
+            parameters,
+            region_name,
+            notification_arns=notification_arns,
+            tags=tags,
+            role_arn=role_arn,
+            cross_stack_resources=cross_stack_resources,
+            create_change_set=True,
+        )
+        self.stack_name = stack_name
+        self.change_set_id = change_set_id
+        self.change_set_name = change_set_name
+        self.changes = self.diff(template=template, parameters=parameters)
+
+    def diff(self, template, parameters=None):
+        self.template = template
+        self._parse_template()
+        changes = []
+        resources_by_action = self.resource_map.diff(self.template_dict, parameters)
+        for action, resources in resources_by_action.items():
+            for resource_name, resource in resources.items():
+                changes.append(FakeChange(
+                    action=action,
+                    logical_resource_id=resource_name,
+                    resource_type=resource['ResourceType'],
+                ))
+        return changes
+
+
 class FakeEvent(BaseModel):
 
     def __init__(self, stack_id, stack_name, logical_resource_id, physical_resource_id, resource_type, resource_status, resource_status_reason=None, resource_properties=None):
@@ -145,10 +333,72 @@ class CloudFormationBackend(BaseBackend):
 
     def __init__(self):
         self.stacks = OrderedDict()
+        self.stacksets = OrderedDict()
         self.deleted_stacks = {}
         self.exports = OrderedDict()
         self.change_sets = OrderedDict()
 
+    def create_stack_set(self, name, template, parameters, tags=None, description=None, region='us-east-1', admin_role=None, execution_role=None):
+        stackset_id = generate_stackset_id(name)
+        new_stackset = FakeStackSet(
+            stackset_id=stackset_id,
+            name=name,
+            template=template,
+            parameters=parameters,
+            description=description,
+            tags=tags,
+            admin_role=admin_role,
+            execution_role=execution_role,
+        )
+        self.stacksets[stackset_id] = new_stackset
+        return new_stackset
+
+    def get_stack_set(self, name):
+        stacksets = self.stacksets.keys()
+        for stackset in stacksets:
+            if self.stacksets[stackset].name == name:
+                return self.stacksets[stackset]
+        raise ValidationError(name)
+
+    def delete_stack_set(self, name):
+        stacksets = self.stacksets.keys()
+        for stackset in stacksets:
+            if self.stacksets[stackset].name == name:
+                self.stacksets[stackset].delete()
+
+    def create_stack_instances(self, stackset_name, accounts, regions, parameters, operation_id=None):
+        stackset = self.get_stack_set(stackset_name)
+
+        stackset.create_stack_instances(
+            accounts=accounts,
+            regions=regions,
+            parameters=parameters,
+            operation_id=operation_id,
+        )
+        return stackset
+
+    def update_stack_set(self, stackset_name, template=None, description=None,
+                         parameters=None, tags=None, admin_role=None, execution_role=None,
+                         accounts=None, regions=None, operation_id=None):
+        stackset = self.get_stack_set(stackset_name)
+        update = stackset.update(
+            template=template,
+            description=description,
+            parameters=parameters,
+            tags=tags,
+            admin_role=admin_role,
+            execution_role=execution_role,
+            accounts=accounts,
+            regions=regions,
+            operation_id=operation_id
+        )
+        return update
+
+    def delete_stack_instances(self, stackset_name, accounts, regions, operation_id=None):
+        stackset = self.get_stack_set(stackset_name)
+        stackset.delete_stack_instances(accounts, regions, operation_id)
+        return stackset
+
     def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None, create_change_set=False):
         stack_id = generate_stack_id(name)
         new_stack = FakeStack(
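With the response handlers added further down, these backend methods support a full stack-set round trip through boto3. A hedged sketch of that flow (the names and template body are illustrative, not taken from the commit's tests):

    import boto3
    from moto import mock_cloudformation

    TEMPLATE = '{"Resources": {"Queue": {"Type": "AWS::SQS::Queue"}}}'

    @mock_cloudformation
    def stack_set_round_trip():
        cf = boto3.client('cloudformation', region_name='us-east-1')
        cf.create_stack_set(StackSetName='demo', TemplateBody=TEMPLATE)
        cf.create_stack_instances(StackSetName='demo',
                                  Accounts=['123456789012'],
                                  Regions=['us-east-1'])
        assert cf.describe_stack_set(StackSetName='demo')['StackSet']['Status'] == 'ACTIVE'
        cf.delete_stack_instances(StackSetName='demo',
                                  Accounts=['123456789012'],
                                  Regions=['us-east-1'],
                                  RetainStacks=True)
        cf.delete_stack_set(StackSetName='demo')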
@@ -170,24 +420,62 @@ class CloudFormationBackend(BaseBackend):
         return new_stack
 
     def create_change_set(self, stack_name, change_set_name, template, parameters, region_name, change_set_type, notification_arns=None, tags=None, role_arn=None):
+        stack_id = None
+        stack_template = None
         if change_set_type == 'UPDATE':
             stacks = self.stacks.values()
             stack = None
             for s in stacks:
                 if s.name == stack_name:
                     stack = s
+                    stack_id = stack.stack_id
+                    stack_template = stack.template
             if stack is None:
                 raise ValidationError(stack_name)
 
         else:
-            stack = self.create_stack(stack_name, template, parameters,
-                                      region_name, notification_arns, tags,
-                                      role_arn, create_change_set=True)
+            stack_id = generate_stack_id(stack_name)
+            stack_template = template
+
         change_set_id = generate_changeset_id(change_set_name, region_name)
-        self.stacks[change_set_name] = {'Id': change_set_id,
-                                        'StackId': stack.stack_id}
-        self.change_sets[change_set_id] = stack
-        return change_set_id, stack.stack_id
+        new_change_set = FakeChangeSet(
+            stack_id=stack_id,
+            stack_name=stack_name,
+            stack_template=stack_template,
+            change_set_id=change_set_id,
+            change_set_name=change_set_name,
+            template=template,
+            parameters=parameters,
+            region_name=region_name,
+            notification_arns=notification_arns,
+            tags=tags,
+            role_arn=role_arn,
+            cross_stack_resources=self.exports
+        )
+        self.change_sets[change_set_id] = new_change_set
+        self.stacks[stack_id] = new_change_set
+        return change_set_id, stack_id
+
+    def delete_change_set(self, change_set_name, stack_name=None):
+        if change_set_name in self.change_sets:
+            # This means arn was passed in
+            del self.change_sets[change_set_name]
+        else:
+            for cs in self.change_sets:
+                if self.change_sets[cs].change_set_name == change_set_name:
+                    del self.change_sets[cs]
+
+    def describe_change_set(self, change_set_name, stack_name=None):
+        change_set = None
+        if change_set_name in self.change_sets:
+            # This means arn was passed in
+            change_set = self.change_sets[change_set_name]
+        else:
+            for cs in self.change_sets:
+                if self.change_sets[cs].change_set_name == change_set_name:
+                    change_set = self.change_sets[cs]
+        if change_set is None:
+            raise ValidationError(change_set_name)
+        return change_set
 
     def execute_change_set(self, change_set_name, stack_name=None):
         stack = None
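`create_change_set` now materialises a real `FakeChangeSet` (registered under both its ARN and its stack id) instead of a bare dict, which is what makes the describe/delete calls above possible. Sketched with boto3 (hedged; this assumes the matching response handlers later in the commit):

    import boto3
    from moto import mock_cloudformation

    @mock_cloudformation
    def change_set_round_trip():
        cf = boto3.client('cloudformation', region_name='us-east-1')
        created = cf.create_change_set(StackName='demo',
                                       ChangeSetName='initial',
                                       TemplateBody='{"Resources": {}}',
                                       ChangeSetType='CREATE')
        described = cf.describe_change_set(ChangeSetName=created['Id'])
        assert described['ChangeSetName'] == 'initial'
        cf.delete_change_set(ChangeSetName=created['Id'])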
@@ -196,7 +484,7 @@ class CloudFormationBackend(BaseBackend):
             stack = self.change_sets[change_set_name]
         else:
             for cs in self.change_sets:
-                if self.change_sets[cs].name == change_set_name:
+                if self.change_sets[cs].change_set_name == change_set_name:
                     stack = self.change_sets[cs]
         if stack is None:
             raise ValidationError(stack_name)
@@ -222,8 +510,15 @@ class CloudFormationBackend(BaseBackend):
         else:
             return list(stacks)
 
+    def list_change_sets(self):
+        return self.change_sets.values()
+
     def list_stacks(self):
-        return self.stacks.values()
+        return [
+            v for v in self.stacks.values()
+        ] + [
+            v for v in self.deleted_stacks.values()
+        ]
 
     def get_stack(self, name_or_stack_id):
         all_stacks = dict(self.deleted_stacks, **self.stacks)
@@ -270,6 +565,9 @@ class CloudFormationBackend(BaseBackend):
         next_token = str(token + 100) if len(all_exports) > token + 100 else None
         return exports, next_token
 
+    def validate_template(self, template):
+        return validate_template_cfn_lint(template)
+
     def _validate_export_uniqueness(self, stack):
         new_stack_export_names = [x.name for x in stack.exports]
         export_names = self.exports.keys()
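`list_stacks` now reports deleted stacks as well, matching CloudFormation's behaviour of keeping `DELETE_COMPLETE` summaries visible in `ListStacks`. A hedged sketch:

    import boto3
    from moto import mock_cloudformation

    @mock_cloudformation
    def deleted_stacks_still_listed():
        cf = boto3.client('cloudformation', region_name='us-east-1')
        cf.create_stack(StackName='short-lived', TemplateBody='{"Resources": {}}')
        cf.delete_stack(StackName='short-lived')
        statuses = [s['StackStatus'] for s in cf.list_stacks()['StackSummaries']]
        assert 'DELETE_COMPLETE' in statuses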
moto/cloudformation/parsing.py
@@ -465,36 +465,70 @@ class ResourceMap(collections.Mapping):
             ec2_models.ec2_backends[self._region_name].create_tags(
                 [self[resource].physical_resource_id], self.tags)
 
-    def update(self, template, parameters=None):
+    def diff(self, template, parameters=None):
         if parameters:
             self.input_parameters = parameters
         self.load_mapping()
         self.load_parameters()
         self.load_conditions()
 
+        old_template = self._resource_json_map
+        new_template = template['Resources']
+
+        resource_names_by_action = {
+            'Add': set(new_template) - set(old_template),
+            'Modify': set(name for name in new_template if name in old_template and new_template[
+                name] != old_template[name]),
+            'Remove': set(old_template) - set(new_template)
+        }
+        resources_by_action = {
+            'Add': {},
+            'Modify': {},
+            'Remove': {},
+        }
+
+        for resource_name in resource_names_by_action['Add']:
+            resources_by_action['Add'][resource_name] = {
+                'LogicalResourceId': resource_name,
+                'ResourceType': new_template[resource_name]['Type']
+            }
+
+        for resource_name in resource_names_by_action['Modify']:
+            resources_by_action['Modify'][resource_name] = {
+                'LogicalResourceId': resource_name,
+                'ResourceType': new_template[resource_name]['Type']
+            }
+
+        for resource_name in resource_names_by_action['Remove']:
+            resources_by_action['Remove'][resource_name] = {
+                'LogicalResourceId': resource_name,
+                'ResourceType': old_template[resource_name]['Type']
+            }
+
+        return resources_by_action
+
+    def update(self, template, parameters=None):
+        resources_by_action = self.diff(template, parameters)
+
         old_template = self._resource_json_map
         new_template = template['Resources']
         self._resource_json_map = new_template
 
-        new_resource_names = set(new_template) - set(old_template)
-        for resource_name in new_resource_names:
+        for resource_name, resource in resources_by_action['Add'].items():
             resource_json = new_template[resource_name]
             new_resource = parse_and_create_resource(
                 resource_name, resource_json, self, self._region_name)
             self._parsed_resources[resource_name] = new_resource
 
-        removed_resource_names = set(old_template) - set(new_template)
-        for resource_name in removed_resource_names:
+        for resource_name, resource in resources_by_action['Remove'].items():
             resource_json = old_template[resource_name]
             parse_and_delete_resource(
                 resource_name, resource_json, self, self._region_name)
             self._parsed_resources.pop(resource_name)
 
-        resources_to_update = set(name for name in new_template if name in old_template and new_template[
-            name] != old_template[name])
-
         tries = 1
-        while resources_to_update and tries < 5:
-            for resource_name in resources_to_update.copy():
+        while resources_by_action['Modify'] and tries < 5:
+            for resource_name, resource in resources_by_action['Modify'].copy().items():
                 resource_json = new_template[resource_name]
                 try:
                     changed_resource = parse_and_update_resource(
@@ -505,7 +539,7 @@ class ResourceMap(collections.Mapping):
                     last_exception = e
                 else:
                     self._parsed_resources[resource_name] = changed_resource
-                    resources_to_update.remove(resource_name)
+                    del resources_by_action['Modify'][resource_name]
             tries += 1
         if tries == 5:
             raise last_exception
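The classification in `diff` is plain set arithmetic over logical resource IDs, which `update` then replays as create/delete/update calls. A worked example with two toy templates:

    old = {'Queue': {'Type': 'AWS::SQS::Queue'}}
    new = {'Queue': {'Type': 'AWS::SQS::Queue',
                     'Properties': {'DelaySeconds': 5}},
           'Topic': {'Type': 'AWS::SNS::Topic'}}

    print(set(new) - set(old))                                # {'Topic'} -> Add
    print(set(old) - set(new))                                # set()     -> Remove
    print({n for n in new if n in old and new[n] != old[n]})  # {'Queue'} -> Modify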
moto/cloudformation/responses.py
@@ -1,6 +1,7 @@
 from __future__ import unicode_literals
 
 import json
+import yaml
 from six.moves.urllib.parse import urlparse
 
 from moto.core.responses import BaseResponse
@@ -87,7 +88,8 @@ class CloudFormationResponse(BaseResponse):
         role_arn = self._get_param('RoleARN')
         update_or_create = self._get_param('ChangeSetType', 'CREATE')
         parameters_list = self._get_list_prefix("Parameters.member")
-        tags = {tag[0]: tag[1] for tag in self._get_list_prefix("Tags.member")}
+        tags = dict((item['key'], item['value'])
+                    for item in self._get_list_prefix("Tags.member"))
         parameters = {param['parameter_key']: param['parameter_value']
                       for param in parameters_list}
         if template_url:
@@ -118,6 +120,31 @@ class CloudFormationResponse(BaseResponse):
         template = self.response_template(CREATE_CHANGE_SET_RESPONSE_TEMPLATE)
         return template.render(stack_id=stack_id, change_set_id=change_set_id)
 
+    def delete_change_set(self):
+        stack_name = self._get_param('StackName')
+        change_set_name = self._get_param('ChangeSetName')
+
+        self.cloudformation_backend.delete_change_set(change_set_name=change_set_name, stack_name=stack_name)
+        if self.request_json:
+            return json.dumps({
+                'DeleteChangeSetResponse': {
+                    'DeleteChangeSetResult': {},
+                }
+            })
+        else:
+            template = self.response_template(DELETE_CHANGE_SET_RESPONSE_TEMPLATE)
+            return template.render()
+
+    def describe_change_set(self):
+        stack_name = self._get_param('StackName')
+        change_set_name = self._get_param('ChangeSetName')
+        change_set = self.cloudformation_backend.describe_change_set(
+            change_set_name=change_set_name,
+            stack_name=stack_name,
+        )
+        template = self.response_template(DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE)
+        return template.render(change_set=change_set)
+
     @amzn_request_id
     def execute_change_set(self):
         stack_name = self._get_param('StackName')
@@ -185,6 +212,11 @@ class CloudFormationResponse(BaseResponse):
         template = self.response_template(DESCRIBE_STACK_EVENTS_RESPONSE)
         return template.render(stack=stack)
 
+    def list_change_sets(self):
+        change_sets = self.cloudformation_backend.list_change_sets()
+        template = self.response_template(LIST_CHANGE_SETS_RESPONSE)
+        return template.render(change_sets=change_sets)
+
     def list_stacks(self):
         stacks = self.cloudformation_backend.list_stacks()
         template = self.response_template(LIST_STACKS_RESPONSE)
@@ -294,6 +326,201 @@ class CloudFormationResponse(BaseResponse):
         template = self.response_template(LIST_EXPORTS_RESPONSE)
         return template.render(exports=exports, next_token=next_token)
 
+    def validate_template(self):
+        cfn_lint = self.cloudformation_backend.validate_template(self._get_param('TemplateBody'))
+        if cfn_lint:
+            raise ValidationError(cfn_lint[0].message)
+        description = ""
+        try:
+            description = json.loads(self._get_param('TemplateBody'))['Description']
+        except (ValueError, KeyError):
+            pass
+        try:
+            description = yaml.load(self._get_param('TemplateBody'))['Description']
+        except (yaml.ParserError, KeyError):
+            pass
+        template = self.response_template(VALIDATE_STACK_RESPONSE_TEMPLATE)
+        return template.render(description=description)
+
+    def create_stack_set(self):
+        stackset_name = self._get_param('StackSetName')
+        stack_body = self._get_param('TemplateBody')
+        template_url = self._get_param('TemplateURL')
+        # role_arn = self._get_param('RoleARN')
+        parameters_list = self._get_list_prefix("Parameters.member")
+        tags = dict((item['key'], item['value'])
+                    for item in self._get_list_prefix("Tags.member"))
+
+        # Copy-Pasta - Hack dict-comprehension
+        parameters = dict([
+            (parameter['parameter_key'], parameter['parameter_value'])
+            for parameter
+            in parameters_list
+        ])
+        if template_url:
+            stack_body = self._get_stack_from_s3_url(template_url)
+
+        stackset = self.cloudformation_backend.create_stack_set(
+            name=stackset_name,
+            template=stack_body,
+            parameters=parameters,
+            tags=tags,
+            # role_arn=role_arn,
+        )
+        if self.request_json:
+            return json.dumps({
+                'CreateStackSetResponse': {
+                    'CreateStackSetResult': {
+                        'StackSetId': stackset.stackset_id,
+                    }
+                }
+            })
+        else:
+            template = self.response_template(CREATE_STACK_SET_RESPONSE_TEMPLATE)
+            return template.render(stackset=stackset)
+
+    def create_stack_instances(self):
+        stackset_name = self._get_param('StackSetName')
+        accounts = self._get_multi_param('Accounts.member')
+        regions = self._get_multi_param('Regions.member')
+        parameters = self._get_multi_param('ParameterOverrides.member')
+        self.cloudformation_backend.create_stack_instances(stackset_name, accounts, regions, parameters)
+        template = self.response_template(CREATE_STACK_INSTANCES_TEMPLATE)
+        return template.render()
+
+    def delete_stack_set(self):
+        stackset_name = self._get_param('StackSetName')
+        self.cloudformation_backend.delete_stack_set(stackset_name)
+        template = self.response_template(DELETE_STACK_SET_RESPONSE_TEMPLATE)
+        return template.render()
+
+    def delete_stack_instances(self):
+        stackset_name = self._get_param('StackSetName')
+        accounts = self._get_multi_param('Accounts.member')
+        regions = self._get_multi_param('Regions.member')
+        operation = self.cloudformation_backend.delete_stack_instances(stackset_name, accounts, regions)
+
+        template = self.response_template(DELETE_STACK_INSTANCES_TEMPLATE)
+        return template.render(operation=operation)
+
+    def describe_stack_set(self):
+        stackset_name = self._get_param('StackSetName')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+
+        if not stackset.admin_role:
+            stackset.admin_role = 'arn:aws:iam::123456789012:role/AWSCloudFormationStackSetAdministrationRole'
+        if not stackset.execution_role:
+            stackset.execution_role = 'AWSCloudFormationStackSetExecutionRole'
+
+        template = self.response_template(DESCRIBE_STACK_SET_RESPONSE_TEMPLATE)
+        return template.render(stackset=stackset)
+
+    def describe_stack_instance(self):
+        stackset_name = self._get_param('StackSetName')
+        account = self._get_param('StackInstanceAccount')
+        region = self._get_param('StackInstanceRegion')
+
+        instance = self.cloudformation_backend.get_stack_set(stackset_name).instances.get_instance(account, region)
+        template = self.response_template(DESCRIBE_STACK_INSTANCE_TEMPLATE)
+        rendered = template.render(instance=instance)
+        return rendered
+
+    def list_stack_sets(self):
+        stacksets = self.cloudformation_backend.stacksets
+        template = self.response_template(LIST_STACK_SETS_TEMPLATE)
+        return template.render(stacksets=stacksets)
+
+    def list_stack_instances(self):
+        stackset_name = self._get_param('StackSetName')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+        template = self.response_template(LIST_STACK_INSTANCES_TEMPLATE)
+        return template.render(stackset=stackset)
+
+    def list_stack_set_operations(self):
+        stackset_name = self._get_param('StackSetName')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+        template = self.response_template(LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE)
+        return template.render(stackset=stackset)
+
+    def stop_stack_set_operation(self):
+        stackset_name = self._get_param('StackSetName')
+        operation_id = self._get_param('OperationId')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+        stackset.update_operation(operation_id, 'STOPPED')
+        template = self.response_template(STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE)
+        return template.render()
+
+    def describe_stack_set_operation(self):
+        stackset_name = self._get_param('StackSetName')
+        operation_id = self._get_param('OperationId')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+        operation = stackset.get_operation(operation_id)
+        template = self.response_template(DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE)
+        return template.render(stackset=stackset, operation=operation)
+
+    def list_stack_set_operation_results(self):
+        stackset_name = self._get_param('StackSetName')
+        operation_id = self._get_param('OperationId')
+        stackset = self.cloudformation_backend.get_stack_set(stackset_name)
+        operation = stackset.get_operation(operation_id)
+        template = self.response_template(LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE)
+        return template.render(operation=operation)
+
+    def update_stack_set(self):
+        stackset_name = self._get_param('StackSetName')
+        operation_id = self._get_param('OperationId')
+        description = self._get_param('Description')
+        execution_role = self._get_param('ExecutionRoleName')
+        admin_role = self._get_param('AdministrationRoleARN')
+        accounts = self._get_multi_param('Accounts.member')
+        regions = self._get_multi_param('Regions.member')
+        template_body = self._get_param('TemplateBody')
+        template_url = self._get_param('TemplateURL')
+        if template_url:
+            template_body = self._get_stack_from_s3_url(template_url)
+        tags = dict((item['key'], item['value'])
+                    for item in self._get_list_prefix("Tags.member"))
+        parameters_list = self._get_list_prefix("Parameters.member")
+        parameters = dict([
+            (parameter['parameter_key'], parameter['parameter_value'])
+            for parameter
+            in parameters_list
+        ])
+        operation = self.cloudformation_backend.update_stack_set(
+            stackset_name=stackset_name,
+            template=template_body,
+            description=description,
+            parameters=parameters,
+            tags=tags,
+            admin_role=admin_role,
+            execution_role=execution_role,
+            accounts=accounts,
+            regions=regions,
+            operation_id=operation_id
+        )
+
+        template = self.response_template(UPDATE_STACK_SET_RESPONSE_TEMPLATE)
+        return template.render(operation=operation)
+
+    def update_stack_instances(self):
+        stackset_name = self._get_param('StackSetName')
+        accounts = self._get_multi_param('Accounts.member')
+        regions = self._get_multi_param('Regions.member')
+        parameters = self._get_multi_param('ParameterOverrides.member')
+        operation = self.cloudformation_backend.get_stack_set(stackset_name).update_instances(accounts, regions, parameters)
+        template = self.response_template(UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE)
+        return template.render(operation=operation)
+
+
+VALIDATE_STACK_RESPONSE_TEMPLATE = """<ValidateTemplateResponse>
+  <ValidateTemplateResult>
+    <Capabilities></Capabilities>
+    <CapabilitiesReason></CapabilitiesReason>
+    <DeclaredTransforms></DeclaredTransforms>
+    <Description>{{ description }}</Description>
+    <Parameters></Parameters>
+  </ValidateTemplateResult>
+</ValidateTemplateResponse>"""
+
 CREATE_STACK_RESPONSE_TEMPLATE = """<CreateStackResponse>
 <CreateStackResult>
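`validate_template` runs the body through cfn-lint, raises on the first finding, and otherwise best-effort extracts a `Description` for the response. A hedged boto3 sketch (assuming cfn-lint is installed; the template is illustrative):

    import boto3
    from moto import mock_cloudformation

    @mock_cloudformation
    def validate_round_trip():
        cf = boto3.client('cloudformation', region_name='us-east-1')
        body = '{"Description": "demo", "Resources": {"Queue": {"Type": "AWS::SQS::Queue"}}}'
        assert cf.validate_template(TemplateBody=body)['Description'] == 'demo'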
@@ -326,6 +553,66 @@ CREATE_CHANGE_SET_RESPONSE_TEMPLATE = """<CreateStackResponse>
 </CreateStackResponse>
 """
 
+DELETE_CHANGE_SET_RESPONSE_TEMPLATE = """<DeleteChangeSetResponse>
+  <DeleteChangeSetResult>
+  </DeleteChangeSetResult>
+  <ResponseMetadata>
+    <RequestId>3d3200a1-810e-3023-6cc3-example</RequestId>
+  </ResponseMetadata>
+</DeleteChangeSetResponse>
+"""
+
+DESCRIBE_CHANGE_SET_RESPONSE_TEMPLATE = """<DescribeChangeSetResponse>
+  <DescribeChangeSetResult>
+    <ChangeSetId>{{ change_set.change_set_id }}</ChangeSetId>
+    <ChangeSetName>{{ change_set.change_set_name }}</ChangeSetName>
+    <StackId>{{ change_set.stack_id }}</StackId>
+    <StackName>{{ change_set.stack_name }}</StackName>
+    <Description>{{ change_set.description }}</Description>
+    <Parameters>
+      {% for param_name, param_value in change_set.stack_parameters.items() %}
+      <member>
+        <ParameterKey>{{ param_name }}</ParameterKey>
+        <ParameterValue>{{ param_value }}</ParameterValue>
+      </member>
+      {% endfor %}
+    </Parameters>
+    <CreationTime>2011-05-23T15:47:44Z</CreationTime>
+    <ExecutionStatus>{{ change_set.execution_status }}</ExecutionStatus>
+    <Status>{{ change_set.status }}</Status>
+    <StatusReason>{{ change_set.status_reason }}</StatusReason>
+    {% if change_set.notification_arns %}
+    <NotificationARNs>
+      {% for notification_arn in change_set.notification_arns %}
+      <member>{{ notification_arn }}</member>
+      {% endfor %}
+    </NotificationARNs>
+    {% else %}
+    <NotificationARNs/>
+    {% endif %}
+    {% if change_set.role_arn %}
+    <RoleARN>{{ change_set.role_arn }}</RoleARN>
+    {% endif %}
+    {% if change_set.changes %}
+    <Changes>
+      {% for change in change_set.changes %}
+      <member>
+        <Type>Resource</Type>
+        <ResourceChange>
+          <Action>{{ change.action }}</Action>
+          <LogicalResourceId>{{ change.logical_resource_id }}</LogicalResourceId>
+          <ResourceType>{{ change.resource_type }}</ResourceType>
+        </ResourceChange>
+      </member>
+      {% endfor %}
+    </Changes>
+    {% endif %}
+    {% if next_token %}
+    <NextToken>{{ next_token }}</NextToken>
+    {% endif %}
+  </DescribeChangeSetResult>
+</DescribeChangeSetResponse>"""
+
 EXECUTE_CHANGE_SET_RESPONSE_TEMPLATE = """<ExecuteChangeSetResponse>
 <ExecuteChangeSetResult>
     <ExecuteChangeSetResult/>
@@ -451,6 +738,27 @@ DESCRIBE_STACK_EVENTS_RESPONSE = """<DescribeStackEventsResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
 </DescribeStackEventsResponse>"""
 
 
+LIST_CHANGE_SETS_RESPONSE = """<ListChangeSetsResponse>
+  <ListChangeSetsResult>
+    <Summaries>
+      {% for change_set in change_sets %}
+      <member>
+        <StackId>{{ change_set.stack_id }}</StackId>
+        <StackName>{{ change_set.stack_name }}</StackName>
+        <ChangeSetId>{{ change_set.change_set_id }}</ChangeSetId>
+        <ChangeSetName>{{ change_set.change_set_name }}</ChangeSetName>
+        <ExecutionStatus>{{ change_set.execution_status }}</ExecutionStatus>
+        <Status>{{ change_set.status }}</Status>
+        <StatusReason>{{ change_set.status_reason }}</StatusReason>
+        <CreationTime>2011-05-23T15:47:44Z</CreationTime>
+        <Description>{{ change_set.description }}</Description>
+      </member>
+      {% endfor %}
+    </Summaries>
+  </ListChangeSetsResult>
+</ListChangeSetsResponse>"""
+
+
 LIST_STACKS_RESPONSE = """<ListStacksResponse>
 <ListStacksResult>
  <StackSummaries>
@@ -525,3 +833,236 @@ LIST_EXPORTS_RESPONSE = """<ListExportsResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
   <RequestId>5ccc7dcd-744c-11e5-be70-example</RequestId>
  </ResponseMetadata>
 </ListExportsResponse>"""
+
+CREATE_STACK_SET_RESPONSE_TEMPLATE = """<CreateStackSetResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <CreateStackSetResult>
+    <StackSetId>{{ stackset.stackset_id }}</StackSetId>
+  </CreateStackSetResult>
+  <ResponseMetadata>
+    <RequestId>f457258c-391d-41d1-861f-example</RequestId>
+  </ResponseMetadata>
+</CreateStackSetResponse>
+"""
+
+DESCRIBE_STACK_SET_RESPONSE_TEMPLATE = """<DescribeStackSetResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <DescribeStackSetResult>
+    <StackSet>
+      <Capabilities/>
+      <StackSetARN>{{ stackset.arn }}</StackSetARN>
+      <ExecutionRoleName>{{ stackset.execution_role }}</ExecutionRoleName>
+      <AdministrationRoleARN>{{ stackset.admin_role }}</AdministrationRoleARN>
+      <StackSetId>{{ stackset.id }}</StackSetId>
+      <TemplateBody>{{ stackset.template }}</TemplateBody>
+      <StackSetName>{{ stackset.name }}</StackSetName>
+      <Parameters>
+        {% for param_name, param_value in stackset.parameters.items() %}
+        <member>
+          <ParameterKey>{{ param_name }}</ParameterKey>
+          <ParameterValue>{{ param_value }}</ParameterValue>
+        </member>
+        {% endfor %}
+      </Parameters>
+      <Tags>
+        {% for tag_key, tag_value in stackset.tags.items() %}
+        <member>
+          <Key>{{ tag_key }}</Key>
+          <Value>{{ tag_value }}</Value>
+        </member>
+        {% endfor %}
+      </Tags>
+      <Status>{{ stackset.status }}</Status>
+    </StackSet>
+  </DescribeStackSetResult>
+  <ResponseMetadata>
+    <RequestId>d8b64e11-5332-46e1-9603-example</RequestId>
+  </ResponseMetadata>
+</DescribeStackSetResponse>"""
+
+DELETE_STACK_SET_RESPONSE_TEMPLATE = """<DeleteStackSetResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <DeleteStackSetResult/>
+  <ResponseMetadata>
+    <RequestId>c35ec2d0-d69f-4c4d-9bd7-example</RequestId>
+  </ResponseMetadata>
+</DeleteStackSetResponse>"""
+
+CREATE_STACK_INSTANCES_TEMPLATE = """<CreateStackInstancesResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <CreateStackInstancesResult>
+    <OperationId>1459ad6d-63cc-4c96-a73e-example</OperationId>
+  </CreateStackInstancesResult>
+  <ResponseMetadata>
+    <RequestId>6b29f7e3-69be-4d32-b374-example</RequestId>
+  </ResponseMetadata>
+</CreateStackInstancesResponse>
+"""
+
+LIST_STACK_INSTANCES_TEMPLATE = """<ListStackInstancesResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <ListStackInstancesResult>
+    <Summaries>
+      {% for instance in stackset.stack_instances %}
+      <member>
+        <StackId>{{ instance.StackId }}</StackId>
+        <StackSetId>{{ instance.StackSetId }}</StackSetId>
+        <Region>{{ instance.Region }}</Region>
+        <Account>{{ instance.Account }}</Account>
+        <Status>{{ instance.Status }}</Status>
+      </member>
+      {% endfor %}
+    </Summaries>
+  </ListStackInstancesResult>
+  <ResponseMetadata>
+    <RequestId>83c27e73-b498-410f-993c-example</RequestId>
+  </ResponseMetadata>
+</ListStackInstancesResponse>
+"""
+
+DELETE_STACK_INSTANCES_TEMPLATE = """<DeleteStackInstancesResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <DeleteStackInstancesResult>
+    <OperationId>{{ operation.OperationId }}</OperationId>
+  </DeleteStackInstancesResult>
+  <ResponseMetadata>
+    <RequestId>e5325090-66f6-4ecd-a531-example</RequestId>
+  </ResponseMetadata>
+</DeleteStackInstancesResponse>
+"""
+
+DESCRIBE_STACK_INSTANCE_TEMPLATE = """<DescribeStackInstanceResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <DescribeStackInstanceResult>
+    <StackInstance>
+      <StackId>{{ instance.StackId }}</StackId>
+      <StackSetId>{{ instance.StackSetId }}</StackSetId>
+      {% if instance.ParameterOverrides %}
+      <ParameterOverrides>
+        {% for override in instance.ParameterOverrides %}
+        {% if override['ParameterKey'] or override['ParameterValue'] %}
+        <member>
+          <ParameterKey>{{ override.ParameterKey }}</ParameterKey>
+          <UsePreviousValue>false</UsePreviousValue>
+          <ParameterValue>{{ override.ParameterValue }}</ParameterValue>
+        </member>
+        {% endif %}
+        {% endfor %}
+      </ParameterOverrides>
+      {% else %}
+      <ParameterOverrides/>
+      {% endif %}
+      <Region>{{ instance.Region }}</Region>
+      <Account>{{ instance.Account }}</Account>
+      <Status>{{ instance.Status }}</Status>
+    </StackInstance>
+  </DescribeStackInstanceResult>
+  <ResponseMetadata>
+    <RequestId>c6c7be10-0343-4319-8a25-example</RequestId>
+  </ResponseMetadata>
+</DescribeStackInstanceResponse>
+"""
+
+LIST_STACK_SETS_TEMPLATE = """<ListStackSetsResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <ListStackSetsResult>
+    <Summaries>
+      {% for key, value in stacksets.items() %}
+      <member>
+        <StackSetName>{{ value.name }}</StackSetName>
+        <StackSetId>{{ value.id }}</StackSetId>
+        <Status>{{ value.status }}</Status>
+      </member>
+      {% endfor %}
+    </Summaries>
+  </ListStackSetsResult>
+  <ResponseMetadata>
+    <RequestId>4dcacb73-841e-4ed8-b335-example</RequestId>
+  </ResponseMetadata>
+</ListStackSetsResponse>
+"""
+
+UPDATE_STACK_INSTANCES_RESPONSE_TEMPLATE = """<UpdateStackInstancesResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <UpdateStackInstancesResult>
+    <OperationId>{{ operation }}</OperationId>
+  </UpdateStackInstancesResult>
+  <ResponseMetadata>
+    <RequestId>bdbf8e94-19b6-4ce4-af85-example</RequestId>
+  </ResponseMetadata>
+</UpdateStackInstancesResponse>
+"""
+
+UPDATE_STACK_SET_RESPONSE_TEMPLATE = """<UpdateStackSetResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <UpdateStackSetResult>
+    <OperationId>{{ operation.OperationId }}</OperationId>
+  </UpdateStackSetResult>
+  <ResponseMetadata>
+    <RequestId>adac907b-17e3-43e6-a254-example</RequestId>
+  </ResponseMetadata>
+</UpdateStackSetResponse>
+"""
+
+LIST_STACK_SET_OPERATIONS_RESPONSE_TEMPLATE = """<ListStackSetOperationsResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <ListStackSetOperationsResult>
+    <Summaries>
+      {% for operation in stackset.operations %}
+      <member>
+        <CreationTimestamp>{{ operation.CreationTimestamp }}</CreationTimestamp>
+        <OperationId>{{ operation.OperationId }}</OperationId>
+        <Action>{{ operation.Action }}</Action>
+        <EndTimestamp>{{ operation.EndTimestamp }}</EndTimestamp>
+        <Status>{{ operation.Status }}</Status>
+      </member>
+      {% endfor %}
+    </Summaries>
+  </ListStackSetOperationsResult>
+  <ResponseMetadata>
+    <RequestId>65b9d9be-08bb-4a43-9a21-example</RequestId>
+  </ResponseMetadata>
+</ListStackSetOperationsResponse>
+"""
+
+STOP_STACK_SET_OPERATION_RESPONSE_TEMPLATE = """<StopStackSetOperationResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <StopStackSetOperationResult/>
+  <ResponseMetadata>
+    <RequestId>2188554a-07c6-4396-b2c5-example</RequestId>
+  </ResponseMetadata> </StopStackSetOperationResponse>
+"""
+
+DESCRIBE_STACKSET_OPERATION_RESPONSE_TEMPLATE = """<DescribeStackSetOperationResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <DescribeStackSetOperationResult>
+    <StackSetOperation>
+      <ExecutionRoleName>{{ stackset.execution_role }}</ExecutionRoleName>
+      <AdministrationRoleARN>arn:aws:iam::123456789012:role/{{ stackset.admin_role }}</AdministrationRoleARN>
+      <StackSetId>{{ stackset.id }}</StackSetId>
+      <CreationTimestamp>{{ operation.CreationTimestamp }}</CreationTimestamp>
+      <OperationId>{{ operation.OperationId }}</OperationId>
+      <Action>{{ operation.Action }}</Action>
+      <OperationPreferences>
+        <RegionOrder/>
+      </OperationPreferences>
+      <EndTimestamp>{{ operation.EndTimestamp }}</EndTimestamp>
+      <Status>{{ operation.Status }}</Status>
+    </StackSetOperation>
+  </DescribeStackSetOperationResult>
+  <ResponseMetadata>
+    <RequestId>2edc27b6-9ce2-486a-a192-example</RequestId>
+  </ResponseMetadata>
+</DescribeStackSetOperationResponse>
+"""
+
+LIST_STACK_SET_OPERATION_RESULTS_RESPONSE_TEMPLATE = """<ListStackSetOperationResultsResponse xmlns="http://internal.amazon.com/coral/com.amazonaws.maestro.service.v20160713/">
+  <ListStackSetOperationResultsResult>
+    <Summaries>
+      {% for instance in operation.Instances %}
+      {% for account, region in instance.items() %}
+      <member>
+        <AccountGateResult>
+          <StatusReason>Function not found: arn:aws:lambda:us-west-2:123456789012:function:AWSCloudFormationStackSetAccountGate</StatusReason>
+          <Status>SKIPPED</Status>
+        </AccountGateResult>
+        <Region>{{ region }}</Region>
+        <Account>{{ account }}</Account>
+        <Status>{{ operation.Status }}</Status>
+      </member>
+      {% endfor %}
+      {% endfor %}
+    </Summaries>
+  </ListStackSetOperationResultsResult>
+  <ResponseMetadata>
+    <RequestId>ac05a9ce-5f98-4197-a29b-example</RequestId>
+  </ResponseMetadata>
+</ListStackSetOperationResultsResponse>
+"""
moto/cloudformation/utils.py
@@ -3,11 +3,14 @@ import uuid
 import six
 import random
 import yaml
+import os
+
+from cfnlint import decode, core
 
 
-def generate_stack_id(stack_name):
+def generate_stack_id(stack_name, region="us-east-1", account="123456789"):
     random_id = uuid.uuid4()
-    return "arn:aws:cloudformation:us-east-1:123456789:stack/{0}/{1}".format(stack_name, random_id)
+    return "arn:aws:cloudformation:{}:{}:stack/{}/{}".format(region, account, stack_name, random_id)
 
 
 def generate_changeset_id(changeset_name, region_name):
@@ -15,6 +18,15 @@ def generate_changeset_id(changeset_name, region_name):
     return 'arn:aws:cloudformation:{0}:123456789:changeSet/{1}/{2}'.format(region_name, changeset_name, random_id)
 
 
+def generate_stackset_id(stackset_name):
+    random_id = uuid.uuid4()
+    return '{}:{}'.format(stackset_name, random_id)
+
+
+def generate_stackset_arn(stackset_id, region_name):
+    return 'arn:aws:cloudformation:{}:123456789012:stackset/{}'.format(region_name, stackset_id)
+
+
 def random_suffix():
     size = 12
     chars = list(range(10)) + ['A-Z']
@@ -38,3 +50,33 @@ def yaml_tag_constructor(loader, tag, node):
         key = 'Fn::{}'.format(tag[1:])
 
     return {key: _f(loader, tag, node)}
+
+
+def validate_template_cfn_lint(template):
+
+    # Save the template to a temporary file -- cfn-lint requires a file
+    filename = "file.tmp"
+    with open(filename, "w") as file:
+        file.write(template)
+    abs_filename = os.path.abspath(filename)
+
+    # decode handles both yaml and json
+    template, matches = decode.decode(abs_filename, False)
+
+    # Set cfn-lint to info
+    core.configure_logging(None)
+
+    # Initialize the ruleset to be applied (no overrules, no excludes)
+    rules = core.get_rules([], [], [])
+
+    # Use us-east-1 region (spec file) for validation
+    regions = ['us-east-1']
+
+    # Process all the rules and gather the errors
+    matches = core.run_checks(
+        abs_filename,
+        template,
+        rules,
+        regions)
+
+    return matches
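The helper can also be exercised directly; it returns cfn-lint match objects, so an empty list means the template lints clean. A sketch, assuming cfn-lint is importable and the working directory is writable (the function writes `file.tmp` next to the process):

    from moto.cloudformation.utils import validate_template_cfn_lint

    template = '{"Resources": {"Queue": {"Type": "AWS::SQS::Queue"}}}'
    for match in validate_template_cfn_lint(template):
        print(match.message)  # nothing printed when the template is clean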
moto/cognitoidp/exceptions.py
@@ -24,6 +24,16 @@ class UserNotFoundError(BadRequest):
         })
 
 
+class GroupExistsException(BadRequest):
+
+    def __init__(self, message):
+        super(GroupExistsException, self).__init__()
+        self.description = json.dumps({
+            "message": message,
+            '__type': 'GroupExistsException',
+        })
+
+
 class NotAuthorizedError(BadRequest):
 
     def __init__(self, message):
moto/cognitoidp/models.py
@@ -1,6 +1,8 @@
 from __future__ import unicode_literals
 
 import datetime
+import functools
+import itertools
 import json
 import os
 import time
@@ -11,8 +13,7 @@ from jose import jws
 
 from moto.compat import OrderedDict
 from moto.core import BaseBackend, BaseModel
-from .exceptions import NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
-
+from .exceptions import GroupExistsException, NotAuthorizedError, ResourceNotFoundError, UserNotFoundError
 
 UserStatus = {
     "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD",
@@ -20,6 +21,39 @@ UserStatus = {
}


+def paginate(limit, start_arg="next_token", limit_arg="max_results"):
+    """Returns a limited result list, and an offset into list of remaining items
+
+    Takes the next_token, and max_results kwargs given to a function and handles
+    the slicing of the results. The kwarg `next_token` is the offset into the
+    list to begin slicing from. `max_results` is the size of the result required
+
+    If the max_results is not supplied then the `limit` parameter is used as a
+    default
+
+    :param limit_arg: the name of argument in the decorated function that
+    controls amount of items returned
+    :param start_arg: the name of the argument in the decorated that provides
+    the starting offset
+    :param limit: A default maximum items to return
+    :return: a tuple containing a list of items, and the offset into the list
+    """
+    default_start = 0
+
+    def outer_wrapper(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            start = int(default_start if kwargs.get(start_arg) is None else kwargs[start_arg])
+            lim = int(limit if kwargs.get(limit_arg) is None else kwargs[limit_arg])
+            stop = start + lim
+            result = func(*args, **kwargs)
+            limited_results = list(itertools.islice(result, start, stop))
+            next_token = stop if stop < len(result) else None
+            return limited_results, next_token
+        return wrapper
+    return outer_wrapper
+
+
class CognitoIdpUserPool(BaseModel):

    def __init__(self, region, name, extended_config):
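The decorator is easiest to read with a concrete call. A minimal sketch with a hypothetical list_things function (not part of this diff), showing how paginate slices the result and hands back the offset to resume from:

@paginate(3)  # default page size of 3 for this sketch
def list_things(max_results=None, next_token=None):
    return ["a", "b", "c", "d", "e"]

page, token = list_things()                  # (['a', 'b', 'c'], 3)
page, token = list_things(next_token=token)  # (['d', 'e'], None)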
@@ -33,6 +67,7 @@ class CognitoIdpUserPool(BaseModel):

        self.clients = OrderedDict()
        self.identity_providers = OrderedDict()
+        self.groups = OrderedDict()
        self.users = OrderedDict()
        self.refresh_tokens = {}
        self.access_tokens = {}
@@ -185,6 +220,33 @@ class CognitoIdpIdentityProvider(BaseModel):
        return identity_provider_json


+class CognitoIdpGroup(BaseModel):
+
+    def __init__(self, user_pool_id, group_name, description, role_arn, precedence):
+        self.user_pool_id = user_pool_id
+        self.group_name = group_name
+        self.description = description or ""
+        self.role_arn = role_arn
+        self.precedence = precedence
+        self.last_modified_date = datetime.datetime.now()
+        self.creation_date = self.last_modified_date
+
+        # Users who are members of this group.
+        # Note that these links are bidirectional.
+        self.users = set()
+
+    def to_json(self):
+        return {
+            "GroupName": self.group_name,
+            "UserPoolId": self.user_pool_id,
+            "Description": self.description,
+            "RoleArn": self.role_arn,
+            "Precedence": self.precedence,
+            "LastModifiedDate": time.mktime(self.last_modified_date.timetuple()),
+            "CreationDate": time.mktime(self.creation_date.timetuple()),
+        }
+
+
class CognitoIdpUser(BaseModel):

    def __init__(self, user_pool_id, username, password, status, attributes):
@@ -198,6 +260,10 @@ class CognitoIdpUser(BaseModel):
        self.create_date = datetime.datetime.utcnow()
        self.last_modified_date = datetime.datetime.utcnow()

+        # Groups this user is a member of.
+        # Note that these links are bidirectional.
+        self.groups = set()
+
    def _base_json(self):
        return {
            "UserPoolId": self.user_pool_id,
@@ -242,7 +308,8 @@ class CognitoIdpBackend(BaseBackend):
        self.user_pools[user_pool.id] = user_pool
        return user_pool

-    def list_user_pools(self):
+    @paginate(60)
+    def list_user_pools(self, max_results=None, next_token=None):
        return self.user_pools.values()

    def describe_user_pool(self, user_pool_id):
@@ -289,7 +356,8 @@ class CognitoIdpBackend(BaseBackend):
        user_pool.clients[user_pool_client.id] = user_pool_client
        return user_pool_client

-    def list_user_pool_clients(self, user_pool_id):
+    @paginate(60)
+    def list_user_pool_clients(self, user_pool_id, max_results=None, next_token=None):
        user_pool = self.user_pools.get(user_pool_id)
        if not user_pool:
            raise ResourceNotFoundError(user_pool_id)
@@ -339,7 +407,8 @@ class CognitoIdpBackend(BaseBackend):
        user_pool.identity_providers[name] = identity_provider
        return identity_provider

-    def list_identity_providers(self, user_pool_id):
+    @paginate(60)
+    def list_identity_providers(self, user_pool_id, max_results=None, next_token=None):
        user_pool = self.user_pools.get(user_pool_id)
        if not user_pool:
            raise ResourceNotFoundError(user_pool_id)
@@ -357,6 +426,19 @@ class CognitoIdpBackend(BaseBackend):

        return identity_provider

+    def update_identity_provider(self, user_pool_id, name, extended_config):
+        user_pool = self.user_pools.get(user_pool_id)
+        if not user_pool:
+            raise ResourceNotFoundError(user_pool_id)
+
+        identity_provider = user_pool.identity_providers.get(name)
+        if not identity_provider:
+            raise ResourceNotFoundError(name)
+
+        identity_provider.extended_config.update(extended_config)
+
+        return identity_provider
+
    def delete_identity_provider(self, user_pool_id, name):
        user_pool = self.user_pools.get(user_pool_id)
        if not user_pool:
@@ -367,6 +449,72 @@ class CognitoIdpBackend(BaseBackend):

        del user_pool.identity_providers[name]

+    # Group
+    def create_group(self, user_pool_id, group_name, description, role_arn, precedence):
+        user_pool = self.user_pools.get(user_pool_id)
+        if not user_pool:
+            raise ResourceNotFoundError(user_pool_id)
+
+        group = CognitoIdpGroup(user_pool_id, group_name, description, role_arn, precedence)
+        if group.group_name in user_pool.groups:
+            raise GroupExistsException("A group with the name already exists")
+        user_pool.groups[group.group_name] = group
+
+        return group
+
+    def get_group(self, user_pool_id, group_name):
+        user_pool = self.user_pools.get(user_pool_id)
+        if not user_pool:
+            raise ResourceNotFoundError(user_pool_id)
+
+        if group_name not in user_pool.groups:
+            raise ResourceNotFoundError(group_name)
+
+        return user_pool.groups[group_name]
+
+    def list_groups(self, user_pool_id):
+        user_pool = self.user_pools.get(user_pool_id)
+        if not user_pool:
+            raise ResourceNotFoundError(user_pool_id)
+
+        return user_pool.groups.values()
+
+    def delete_group(self, user_pool_id, group_name):
+        user_pool = self.user_pools.get(user_pool_id)
+        if not user_pool:
+            raise ResourceNotFoundError(user_pool_id)
+
+        if group_name not in user_pool.groups:
+            raise ResourceNotFoundError(group_name)
+
+        group = user_pool.groups[group_name]
+        for user in group.users:
+            user.groups.remove(group)
+
+        del user_pool.groups[group_name]
+
+    def admin_add_user_to_group(self, user_pool_id, group_name, username):
+        group = self.get_group(user_pool_id, group_name)
+        user = self.admin_get_user(user_pool_id, username)
+
+        group.users.add(user)
+        user.groups.add(group)
+
+    def list_users_in_group(self, user_pool_id, group_name):
+        group = self.get_group(user_pool_id, group_name)
+        return list(group.users)
+
+    def admin_list_groups_for_user(self, user_pool_id, username):
+        user = self.admin_get_user(user_pool_id, username)
+        return list(user.groups)
+
+    def admin_remove_user_from_group(self, user_pool_id, group_name, username):
+        group = self.get_group(user_pool_id, group_name)
+        user = self.admin_get_user(user_pool_id, username)
+
+        group.users.discard(user)
+        user.groups.discard(group)
+
    # User
    def admin_create_user(self, user_pool_id, username, temporary_password, attributes):
        user_pool = self.user_pools.get(user_pool_id)
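These backend methods are what the standard Cognito group APIs dispatch to. A minimal sketch of exercising them through boto3 under moto's mock_cognitoidp decorator (pool, group, and user names are illustrative):

import boto3
from moto import mock_cognitoidp

@mock_cognitoidp
def group_round_trip():
    conn = boto3.client("cognito-idp", "us-west-2")
    pool_id = conn.create_user_pool(PoolName="example-pool")["UserPool"]["Id"]
    conn.create_group(GroupName="admins", UserPoolId=pool_id, Description="ops team")
    conn.admin_create_user(UserPoolId=pool_id, Username="alice")
    conn.admin_add_user_to_group(UserPoolId=pool_id, Username="alice", GroupName="admins")
    # The bidirectional link is visible from both sides of the API
    assert conn.list_users_in_group(UserPoolId=pool_id, GroupName="admins")["Users"]
    assert conn.admin_list_groups_for_user(Username="alice", UserPoolId=pool_id)["Groups"]

group_round_trip()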
@@ -387,7 +535,8 @@ class CognitoIdpBackend(BaseBackend):

        return user_pool.users[username]

-    def list_users(self, user_pool_id):
+    @paginate(60, "pagination_token", "limit")
+    def list_users(self, user_pool_id, pagination_token=None, limit=None):
        user_pool = self.user_pools.get(user_pool_id)
        if not user_pool:
            raise ResourceNotFoundError(user_pool_id)
@@ -410,6 +559,10 @@ class CognitoIdpBackend(BaseBackend):
        if username not in user_pool.users:
            raise UserNotFoundError(username)

+        user = user_pool.users[username]
+        for group in user.groups:
+            group.users.remove(user)
+
        del user_pool.users[username]

    def _log_user_in(self, user_pool, client, username):
@@ -22,10 +22,17 @@ class CognitoIdpResponse(BaseResponse):
        })

    def list_user_pools(self):
-        user_pools = cognitoidp_backends[self.region].list_user_pools()
-        return json.dumps({
-            "UserPools": [user_pool.to_json() for user_pool in user_pools]
-        })
+        max_results = self._get_param("MaxResults")
+        next_token = self._get_param("NextToken", "0")
+        user_pools, next_token = cognitoidp_backends[self.region].list_user_pools(
+            max_results=max_results, next_token=next_token
+        )
+        response = {
+            "UserPools": [user_pool.to_json() for user_pool in user_pools],
+        }
+        if next_token:
+            response["NextToken"] = str(next_token)
+        return json.dumps(response)

    def describe_user_pool(self):
        user_pool_id = self._get_param("UserPoolId")
@@ -72,10 +79,16 @@ class CognitoIdpResponse(BaseResponse):

    def list_user_pool_clients(self):
        user_pool_id = self._get_param("UserPoolId")
-        user_pool_clients = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id)
-        return json.dumps({
+        max_results = self._get_param("MaxResults")
+        next_token = self._get_param("NextToken", "0")
+        user_pool_clients, next_token = cognitoidp_backends[self.region].list_user_pool_clients(user_pool_id,
+            max_results=max_results, next_token=next_token)
+        response = {
            "UserPoolClients": [user_pool_client.to_json() for user_pool_client in user_pool_clients]
-        })
+        }
+        if next_token:
+            response["NextToken"] = str(next_token)
+        return json.dumps(response)

    def describe_user_pool_client(self):
        user_pool_id = self._get_param("UserPoolId")
@@ -110,10 +123,17 @@ class CognitoIdpResponse(BaseResponse):

    def list_identity_providers(self):
        user_pool_id = self._get_param("UserPoolId")
-        identity_providers = cognitoidp_backends[self.region].list_identity_providers(user_pool_id)
-        return json.dumps({
+        max_results = self._get_param("MaxResults")
+        next_token = self._get_param("NextToken", "0")
+        identity_providers, next_token = cognitoidp_backends[self.region].list_identity_providers(
+            user_pool_id, max_results=max_results, next_token=next_token
+        )
+        response = {
            "Providers": [identity_provider.to_json() for identity_provider in identity_providers]
-        })
+        }
+        if next_token:
+            response["NextToken"] = str(next_token)
+        return json.dumps(response)

    def describe_identity_provider(self):
        user_pool_id = self._get_param("UserPoolId")
@@ -123,12 +143,103 @@ class CognitoIdpResponse(BaseResponse):
            "IdentityProvider": identity_provider.to_json(extended=True)
        })

+    def update_identity_provider(self):
+        user_pool_id = self._get_param("UserPoolId")
+        name = self._get_param("ProviderName")
+        identity_provider = cognitoidp_backends[self.region].update_identity_provider(user_pool_id, name, self.parameters)
+        return json.dumps({
+            "IdentityProvider": identity_provider.to_json(extended=True)
+        })
+
    def delete_identity_provider(self):
        user_pool_id = self._get_param("UserPoolId")
        name = self._get_param("ProviderName")
        cognitoidp_backends[self.region].delete_identity_provider(user_pool_id, name)
        return ""

+    # Group
+    def create_group(self):
+        group_name = self._get_param("GroupName")
+        user_pool_id = self._get_param("UserPoolId")
+        description = self._get_param("Description")
+        role_arn = self._get_param("RoleArn")
+        precedence = self._get_param("Precedence")
+
+        group = cognitoidp_backends[self.region].create_group(
+            user_pool_id,
+            group_name,
+            description,
+            role_arn,
+            precedence,
+        )
+
+        return json.dumps({
+            "Group": group.to_json(),
+        })
+
+    def get_group(self):
+        group_name = self._get_param("GroupName")
+        user_pool_id = self._get_param("UserPoolId")
+        group = cognitoidp_backends[self.region].get_group(user_pool_id, group_name)
+        return json.dumps({
+            "Group": group.to_json(),
+        })
+
+    def list_groups(self):
+        user_pool_id = self._get_param("UserPoolId")
+        groups = cognitoidp_backends[self.region].list_groups(user_pool_id)
+        return json.dumps({
+            "Groups": [group.to_json() for group in groups],
+        })
+
+    def delete_group(self):
+        group_name = self._get_param("GroupName")
+        user_pool_id = self._get_param("UserPoolId")
+        cognitoidp_backends[self.region].delete_group(user_pool_id, group_name)
+        return ""
+
+    def admin_add_user_to_group(self):
+        user_pool_id = self._get_param("UserPoolId")
+        username = self._get_param("Username")
+        group_name = self._get_param("GroupName")
+
+        cognitoidp_backends[self.region].admin_add_user_to_group(
+            user_pool_id,
+            group_name,
+            username,
+        )
+
+        return ""
+
+    def list_users_in_group(self):
+        user_pool_id = self._get_param("UserPoolId")
+        group_name = self._get_param("GroupName")
+        users = cognitoidp_backends[self.region].list_users_in_group(user_pool_id, group_name)
+        return json.dumps({
+            "Users": [user.to_json(extended=True) for user in users],
+        })
+
+    def admin_list_groups_for_user(self):
+        username = self._get_param("Username")
+        user_pool_id = self._get_param("UserPoolId")
+        groups = cognitoidp_backends[self.region].admin_list_groups_for_user(user_pool_id, username)
+        return json.dumps({
+            "Groups": [group.to_json() for group in groups],
+        })
+
+    def admin_remove_user_from_group(self):
+        user_pool_id = self._get_param("UserPoolId")
+        username = self._get_param("Username")
+        group_name = self._get_param("GroupName")
+
+        cognitoidp_backends[self.region].admin_remove_user_from_group(
+            user_pool_id,
+            group_name,
+            username,
+        )
+
+        return ""
+
    # User
    def admin_create_user(self):
        user_pool_id = self._get_param("UserPoolId")
@@ -155,10 +266,15 @@ class CognitoIdpResponse(BaseResponse):

    def list_users(self):
        user_pool_id = self._get_param("UserPoolId")
-        users = cognitoidp_backends[self.region].list_users(user_pool_id)
-        return json.dumps({
-            "Users": [user.to_json(extended=True) for user in users]
-        })
+        limit = self._get_param("Limit")
+        token = self._get_param("PaginationToken")
+        users, token = cognitoidp_backends[self.region].list_users(user_pool_id,
+                                                                   limit=limit,
+                                                                   pagination_token=token)
+        response = {"Users": [user.to_json(extended=True) for user in users]}
+        if token:
+            response["PaginationToken"] = str(token)
+        return json.dumps(response)

    def admin_disable_user(self):
        user_pool_id = self._get_param("UserPoolId")
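With the response handlers above in place, ListUsers pages like the real service. A minimal sketch, under this mock, of following PaginationToken until it disappears:

import boto3
from moto import mock_cognitoidp

@mock_cognitoidp
def page_through_users():
    conn = boto3.client("cognito-idp", "us-west-2")
    pool_id = conn.create_user_pool(PoolName="example-pool")["UserPool"]["Id"]
    for i in range(5):
        conn.admin_create_user(UserPoolId=pool_id, Username="user-%d" % i)

    first = conn.list_users(UserPoolId=pool_id, Limit=3)
    rest = conn.list_users(UserPoolId=pool_id, Limit=3,
                           PaginationToken=first["PaginationToken"])
    assert len(first["Users"]) == 3
    assert len(rest["Users"]) == 2 and "PaginationToken" not in rest

page_through_users()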
4 moto/config/__init__.py Normal file
@@ -0,0 +1,4 @@
+from .models import config_backends
+from ..core.models import base_decorator
+
+mock_config = base_decorator(config_backends)
149 moto/config/exceptions.py Normal file
@@ -0,0 +1,149 @@
+from __future__ import unicode_literals
+from moto.core.exceptions import JsonRESTError
+
+
+class NameTooLongException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name, location):
+        message = '1 validation error detected: Value \'{name}\' at \'{location}\' failed to satisfy' \
+                  ' constraint: Member must have length less than or equal to 256'.format(name=name, location=location)
+        super(NameTooLongException, self).__init__("ValidationException", message)
+
+
+class InvalidConfigurationRecorderNameException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'The configuration recorder name \'{name}\' is not valid, blank string.'.format(name=name)
+        super(InvalidConfigurationRecorderNameException, self).__init__("InvalidConfigurationRecorderNameException",
+                                                                        message)
+
+
+class MaxNumberOfConfigurationRecordersExceededException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Failed to put configuration recorder \'{name}\' because the maximum number of ' \
+                  'configuration recorders: 1 is reached.'.format(name=name)
+        super(MaxNumberOfConfigurationRecordersExceededException, self).__init__(
+            "MaxNumberOfConfigurationRecordersExceededException", message)
+
+
+class InvalidRecordingGroupException(JsonRESTError):
+    code = 400
+
+    def __init__(self):
+        message = 'The recording group provided is not valid'
+        super(InvalidRecordingGroupException, self).__init__("InvalidRecordingGroupException", message)
+
+
+class InvalidResourceTypeException(JsonRESTError):
+    code = 400
+
+    def __init__(self, bad_list, good_list):
+        message = '{num} validation error detected: Value \'{bad_list}\' at ' \
+                  '\'configurationRecorder.recordingGroup.resourceTypes\' failed to satisfy constraint: ' \
+                  'Member must satisfy constraint: [Member must satisfy enum value set: {good_list}]'.format(
+                      num=len(bad_list), bad_list=bad_list, good_list=good_list)
+        # For PY2:
+        message = str(message)
+
+        super(InvalidResourceTypeException, self).__init__("ValidationException", message)
+
+
+class NoSuchConfigurationRecorderException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Cannot find configuration recorder with the specified name \'{name}\'.'.format(name=name)
+        super(NoSuchConfigurationRecorderException, self).__init__("NoSuchConfigurationRecorderException", message)
+
+
+class InvalidDeliveryChannelNameException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'The delivery channel name \'{name}\' is not valid, blank string.'.format(name=name)
+        super(InvalidDeliveryChannelNameException, self).__init__("InvalidDeliveryChannelNameException",
+                                                                  message)
+
+
+class NoSuchBucketException(JsonRESTError):
+    """We are *only* validating that there is value that is not '' here."""
+    code = 400
+
+    def __init__(self):
+        message = 'Cannot find a S3 bucket with an empty bucket name.'
+        super(NoSuchBucketException, self).__init__("NoSuchBucketException", message)
+
+
+class InvalidS3KeyPrefixException(JsonRESTError):
+    code = 400
+
+    def __init__(self):
+        message = 'The s3 key prefix \'\' is not valid, empty s3 key prefix.'
+        super(InvalidS3KeyPrefixException, self).__init__("InvalidS3KeyPrefixException", message)
+
+
+class InvalidSNSTopicARNException(JsonRESTError):
+    """We are *only* validating that there is value that is not '' here."""
+    code = 400
+
+    def __init__(self):
+        message = 'The sns topic arn \'\' is not valid.'
+        super(InvalidSNSTopicARNException, self).__init__("InvalidSNSTopicARNException", message)
+
+
+class InvalidDeliveryFrequency(JsonRESTError):
+    code = 400
+
+    def __init__(self, value, good_list):
+        message = '1 validation error detected: Value \'{value}\' at ' \
+                  '\'deliveryChannel.configSnapshotDeliveryProperties.deliveryFrequency\' failed to satisfy ' \
+                  'constraint: Member must satisfy enum value set: {good_list}'.format(value=value, good_list=good_list)
+        super(InvalidDeliveryFrequency, self).__init__("InvalidDeliveryFrequency", message)
+
+
+class MaxNumberOfDeliveryChannelsExceededException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Failed to put delivery channel \'{name}\' because the maximum number of ' \
+                  'delivery channels: 1 is reached.'.format(name=name)
+        super(MaxNumberOfDeliveryChannelsExceededException, self).__init__(
+            "MaxNumberOfDeliveryChannelsExceededException", message)
+
+
+class NoSuchDeliveryChannelException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Cannot find delivery channel with specified name \'{name}\'.'.format(name=name)
+        super(NoSuchDeliveryChannelException, self).__init__("NoSuchDeliveryChannelException", message)
+
+
+class NoAvailableConfigurationRecorderException(JsonRESTError):
+    code = 400
+
+    def __init__(self):
+        message = 'Configuration recorder is not available to put delivery channel.'
+        super(NoAvailableConfigurationRecorderException, self).__init__("NoAvailableConfigurationRecorderException",
+                                                                        message)
+
+
+class NoAvailableDeliveryChannelException(JsonRESTError):
+    code = 400
+
+    def __init__(self):
+        message = 'Delivery channel is not available to start configuration recorder.'
+        super(NoAvailableDeliveryChannelException, self).__init__("NoAvailableDeliveryChannelException", message)
+
+
+class LastDeliveryChannelDeleteFailedException(JsonRESTError):
+    code = 400
+
+    def __init__(self, name):
+        message = 'Failed to delete last specified delivery channel with name \'{name}\', because there, ' \
+                  'because there is a running configuration recorder.'.format(name=name)
+        super(LastDeliveryChannelDeleteFailedException, self).__init__("LastDeliveryChannelDeleteFailedException", message)
335 moto/config/models.py Normal file
@@ -0,0 +1,335 @@
+import json
+import time
+import pkg_resources
+
+from datetime import datetime
+
+from boto3 import Session
+
+from moto.config.exceptions import InvalidResourceTypeException, InvalidDeliveryFrequency, \
+    InvalidConfigurationRecorderNameException, NameTooLongException, \
+    MaxNumberOfConfigurationRecordersExceededException, InvalidRecordingGroupException, \
+    NoSuchConfigurationRecorderException, NoAvailableConfigurationRecorderException, \
+    InvalidDeliveryChannelNameException, NoSuchBucketException, InvalidS3KeyPrefixException, \
+    InvalidSNSTopicARNException, MaxNumberOfDeliveryChannelsExceededException, NoAvailableDeliveryChannelException, \
+    NoSuchDeliveryChannelException, LastDeliveryChannelDeleteFailedException
+
+from moto.core import BaseBackend, BaseModel
+
+DEFAULT_ACCOUNT_ID = 123456789012
+
+
+def datetime2int(date):
+    return int(time.mktime(date.timetuple()))
+
+
+def snake_to_camels(original):
+    parts = original.split('_')
+
+    camel_cased = parts[0].lower() + ''.join(p.title() for p in parts[1:])
+    camel_cased = camel_cased.replace('Arn', 'ARN')  # Config uses 'ARN' instead of 'Arn'
+
+    return camel_cased
+
+
+class ConfigEmptyDictable(BaseModel):
+    """Base class to make serialization easy. This assumes that the sub-class will NOT return 'None's in the JSON."""
+
+    def to_dict(self):
+        data = {}
+        for item, value in self.__dict__.items():
+            if value is not None:
+                if isinstance(value, ConfigEmptyDictable):
+                    data[snake_to_camels(item)] = value.to_dict()
+                else:
+                    data[snake_to_camels(item)] = value
+
+        return data
+
+
+class ConfigRecorderStatus(ConfigEmptyDictable):
+
+    def __init__(self, name):
+        self.name = name
+
+        self.recording = False
+        self.last_start_time = None
+        self.last_stop_time = None
+        self.last_status = None
+        self.last_error_code = None
+        self.last_error_message = None
+        self.last_status_change_time = None
+
+    def start(self):
+        self.recording = True
+        self.last_status = 'PENDING'
+        self.last_start_time = datetime2int(datetime.utcnow())
+        self.last_status_change_time = datetime2int(datetime.utcnow())
+
+    def stop(self):
+        self.recording = False
+        self.last_stop_time = datetime2int(datetime.utcnow())
+        self.last_status_change_time = datetime2int(datetime.utcnow())
+
+
+class ConfigDeliverySnapshotProperties(ConfigEmptyDictable):
+
+    def __init__(self, delivery_frequency):
+        self.delivery_frequency = delivery_frequency
+
+
+class ConfigDeliveryChannel(ConfigEmptyDictable):
+
+    def __init__(self, name, s3_bucket_name, prefix=None, sns_arn=None, snapshot_properties=None):
+        self.name = name
+        self.s3_bucket_name = s3_bucket_name
+        self.s3_key_prefix = prefix
+        self.sns_topic_arn = sns_arn
+        self.config_snapshot_delivery_properties = snapshot_properties
+
+
+class RecordingGroup(ConfigEmptyDictable):
+
+    def __init__(self, all_supported=True, include_global_resource_types=False, resource_types=None):
+        self.all_supported = all_supported
+        self.include_global_resource_types = include_global_resource_types
+        self.resource_types = resource_types
+
+
+class ConfigRecorder(ConfigEmptyDictable):
+
+    def __init__(self, role_arn, recording_group, name='default', status=None):
+        self.name = name
+        self.role_arn = role_arn
+        self.recording_group = recording_group
+
+        if not status:
+            self.status = ConfigRecorderStatus(name)
+        else:
+            self.status = status
+
+
+class ConfigBackend(BaseBackend):
+
+    def __init__(self):
+        self.recorders = {}
+        self.delivery_channels = {}
+
+    @staticmethod
+    def _validate_resource_types(resource_list):
+        # Load the service file:
+        resource_package = 'botocore'
+        resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json'))
+        conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path))
+
+        # Verify that each entry exists in the supported list:
+        bad_list = []
+        for resource in resource_list:
+            # For PY2:
+            r_str = str(resource)
+
+            if r_str not in conifg_schema['shapes']['ResourceType']['enum']:
+                bad_list.append(r_str)
+
+        if bad_list:
+            raise InvalidResourceTypeException(bad_list, conifg_schema['shapes']['ResourceType']['enum'])
+
+    @staticmethod
+    def _validate_delivery_snapshot_properties(properties):
+        # Load the service file:
+        resource_package = 'botocore'
+        resource_path = '/'.join(('data', 'config', '2014-11-12', 'service-2.json'))
+        conifg_schema = json.loads(pkg_resources.resource_string(resource_package, resource_path))
+
+        # Verify that the deliveryFrequency is set to an acceptable value:
+        if properties.get('deliveryFrequency', None) not in \
+                conifg_schema['shapes']['MaximumExecutionFrequency']['enum']:
+            raise InvalidDeliveryFrequency(properties.get('deliveryFrequency', None),
+                                           conifg_schema['shapes']['MaximumExecutionFrequency']['enum'])
+
+    def put_configuration_recorder(self, config_recorder):
+        # Validate the name:
+        if not config_recorder.get('name'):
+            raise InvalidConfigurationRecorderNameException(config_recorder.get('name'))
+        if len(config_recorder.get('name')) > 256:
+            raise NameTooLongException(config_recorder.get('name'), 'configurationRecorder.name')
+
+        # We're going to assume that the passed in Role ARN is correct.
+
+        # Config currently only allows 1 configuration recorder for an account:
+        if len(self.recorders) == 1 and not self.recorders.get(config_recorder['name']):
+            raise MaxNumberOfConfigurationRecordersExceededException(config_recorder['name'])
+
+        # Is this updating an existing one?
+        recorder_status = None
+        if self.recorders.get(config_recorder['name']):
+            recorder_status = self.recorders[config_recorder['name']].status
+
+        # Validate the Recording Group:
+        if config_recorder.get('recordingGroup') is None:
+            recording_group = RecordingGroup()
+        else:
+            rg = config_recorder['recordingGroup']
+
+            # If an empty dict is passed in, then bad:
+            if not rg:
+                raise InvalidRecordingGroupException()
+
+            # Can't have both the resource types specified and the other flags as True.
+            if rg.get('resourceTypes') and (
+                    rg.get('allSupported', False) or
+                    rg.get('includeGlobalResourceTypes', False)):
+                raise InvalidRecordingGroupException()
+
+            # Must supply resourceTypes if 'allSupported' is not supplied:
+            if not rg.get('allSupported') and not rg.get('resourceTypes'):
+                raise InvalidRecordingGroupException()
+
+            # Validate that the list provided is correct:
+            self._validate_resource_types(rg.get('resourceTypes', []))
+
+            recording_group = RecordingGroup(
+                all_supported=rg.get('allSupported', True),
+                include_global_resource_types=rg.get('includeGlobalResourceTypes', False),
+                resource_types=rg.get('resourceTypes', [])
+            )
+
+        self.recorders[config_recorder['name']] = \
+            ConfigRecorder(config_recorder['roleARN'], recording_group, name=config_recorder['name'],
+                           status=recorder_status)
+
+    def describe_configuration_recorders(self, recorder_names):
+        recorders = []
+
+        if recorder_names:
+            for rn in recorder_names:
+                if not self.recorders.get(rn):
+                    raise NoSuchConfigurationRecorderException(rn)
+
+                # Format the recorder:
+                recorders.append(self.recorders[rn].to_dict())
+
+        else:
+            for recorder in self.recorders.values():
+                recorders.append(recorder.to_dict())
+
+        return recorders
+
+    def describe_configuration_recorder_status(self, recorder_names):
+        recorders = []
+
+        if recorder_names:
+            for rn in recorder_names:
+                if not self.recorders.get(rn):
+                    raise NoSuchConfigurationRecorderException(rn)
+
+                # Format the recorder:
+                recorders.append(self.recorders[rn].status.to_dict())
+
+        else:
+            for recorder in self.recorders.values():
+                recorders.append(recorder.status.to_dict())
+
+        return recorders
+
+    def put_delivery_channel(self, delivery_channel):
+        # Must have a configuration recorder:
+        if not self.recorders:
+            raise NoAvailableConfigurationRecorderException()
+
+        # Validate the name:
+        if not delivery_channel.get('name'):
+            raise InvalidDeliveryChannelNameException(delivery_channel.get('name'))
+        if len(delivery_channel.get('name')) > 256:
+            raise NameTooLongException(delivery_channel.get('name'), 'deliveryChannel.name')
+
+        # We are going to assume that the bucket exists -- but will verify if the bucket provided is blank:
+        if not delivery_channel.get('s3BucketName'):
+            raise NoSuchBucketException()
+
+        # We are going to assume that the bucket has the correct policy attached to it. We are only going to verify
+        # if the prefix provided is not an empty string:
+        if delivery_channel.get('s3KeyPrefix', None) == '':
+            raise InvalidS3KeyPrefixException()
+
+        # Ditto for SNS -- Only going to assume that the ARN provided is not an empty string:
+        if delivery_channel.get('snsTopicARN', None) == '':
+            raise InvalidSNSTopicARNException()
+
+        # Config currently only allows 1 delivery channel for an account:
+        if len(self.delivery_channels) == 1 and not self.delivery_channels.get(delivery_channel['name']):
+            raise MaxNumberOfDeliveryChannelsExceededException(delivery_channel['name'])
+
+        if not delivery_channel.get('configSnapshotDeliveryProperties'):
+            dp = None
+
+        else:
+            # Validate the config snapshot delivery properties:
+            self._validate_delivery_snapshot_properties(delivery_channel['configSnapshotDeliveryProperties'])
+
+            dp = ConfigDeliverySnapshotProperties(
+                delivery_channel['configSnapshotDeliveryProperties']['deliveryFrequency'])
+
+        self.delivery_channels[delivery_channel['name']] = \
+            ConfigDeliveryChannel(delivery_channel['name'], delivery_channel['s3BucketName'],
+                                  prefix=delivery_channel.get('s3KeyPrefix', None),
+                                  sns_arn=delivery_channel.get('snsTopicARN', None),
+                                  snapshot_properties=dp)
+
+    def describe_delivery_channels(self, channel_names):
+        channels = []
+
+        if channel_names:
+            for cn in channel_names:
+                if not self.delivery_channels.get(cn):
+                    raise NoSuchDeliveryChannelException(cn)
+
+                # Format the delivery channel:
+                channels.append(self.delivery_channels[cn].to_dict())
+
+        else:
+            for channel in self.delivery_channels.values():
+                channels.append(channel.to_dict())
+
+        return channels
+
+    def start_configuration_recorder(self, recorder_name):
+        if not self.recorders.get(recorder_name):
+            raise NoSuchConfigurationRecorderException(recorder_name)
+
+        # Must have a delivery channel available as well:
+        if not self.delivery_channels:
+            raise NoAvailableDeliveryChannelException()
+
+        # Start recording:
+        self.recorders[recorder_name].status.start()
+
+    def stop_configuration_recorder(self, recorder_name):
+        if not self.recorders.get(recorder_name):
+            raise NoSuchConfigurationRecorderException(recorder_name)
+
+        # Stop recording:
+        self.recorders[recorder_name].status.stop()
+
+    def delete_configuration_recorder(self, recorder_name):
+        if not self.recorders.get(recorder_name):
+            raise NoSuchConfigurationRecorderException(recorder_name)
+
+        del self.recorders[recorder_name]
+
+    def delete_delivery_channel(self, channel_name):
+        if not self.delivery_channels.get(channel_name):
+            raise NoSuchDeliveryChannelException(channel_name)
+
+        # Check if a channel is recording -- if so, bad -- (there can only be 1 recorder):
+        for recorder in self.recorders.values():
+            if recorder.status.recording:
+                raise LastDeliveryChannelDeleteFailedException(channel_name)
+
+        del self.delivery_channels[channel_name]
+
+
+config_backends = {}
+boto3_session = Session()
+for region in boto3_session.get_available_regions('config'):
+    config_backends[region] = ConfigBackend()
53 moto/config/responses.py Normal file
@@ -0,0 +1,53 @@
+import json
+from moto.core.responses import BaseResponse
+from .models import config_backends
+
+
+class ConfigResponse(BaseResponse):
+
+    @property
+    def config_backend(self):
+        return config_backends[self.region]
+
+    def put_configuration_recorder(self):
+        self.config_backend.put_configuration_recorder(self._get_param('ConfigurationRecorder'))
+        return ""
+
+    def describe_configuration_recorders(self):
+        recorders = self.config_backend.describe_configuration_recorders(self._get_param('ConfigurationRecorderNames'))
+        schema = {'ConfigurationRecorders': recorders}
+        return json.dumps(schema)
+
+    def describe_configuration_recorder_status(self):
+        recorder_statuses = self.config_backend.describe_configuration_recorder_status(
+            self._get_param('ConfigurationRecorderNames'))
+        schema = {'ConfigurationRecordersStatus': recorder_statuses}
+        return json.dumps(schema)
+
+    def put_delivery_channel(self):
+        self.config_backend.put_delivery_channel(self._get_param('DeliveryChannel'))
+        return ""
+
+    def describe_delivery_channels(self):
+        delivery_channels = self.config_backend.describe_delivery_channels(self._get_param('DeliveryChannelNames'))
+        schema = {'DeliveryChannels': delivery_channels}
+        return json.dumps(schema)
+
+    def describe_delivery_channel_status(self):
+        raise NotImplementedError()
+
+    def delete_delivery_channel(self):
+        self.config_backend.delete_delivery_channel(self._get_param('DeliveryChannelName'))
+        return ""
+
+    def delete_configuration_recorder(self):
+        self.config_backend.delete_configuration_recorder(self._get_param('ConfigurationRecorderName'))
+        return ""
+
+    def start_configuration_recorder(self):
+        self.config_backend.start_configuration_recorder(self._get_param('ConfigurationRecorderName'))
+        return ""
+
+    def stop_configuration_recorder(self):
+        self.config_backend.stop_configuration_recorder(self._get_param('ConfigurationRecorderName'))
+        return ""
10 moto/config/urls.py Normal file
@@ -0,0 +1,10 @@
+from __future__ import unicode_literals
+from .responses import ConfigResponse
+
+url_bases = [
+    "https?://config.(.+).amazonaws.com",
+]
+
+url_paths = {
+    '{0}/$': ConfigResponse.dispatch,
+}
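Wired together through these four files, the config backend can be driven end to end with boto3. A minimal sketch under the new mock_config decorator (the role ARN and bucket name are illustrative; the backend deliberately does not verify either):

import boto3
from moto.config import mock_config

@mock_config
def record_everything():
    client = boto3.client("config", region_name="us-east-1")
    client.put_configuration_recorder(ConfigurationRecorder={
        "name": "default",
        "roleARN": "arn:aws:iam::123456789012:role/config-role",  # assumed valid, not checked
        "recordingGroup": {"allSupported": True, "includeGlobalResourceTypes": False},
    })
    client.put_delivery_channel(DeliveryChannel={
        "name": "default",
        "s3BucketName": "example-bucket",  # existence is not verified by the mock
    })
    client.start_configuration_recorder(ConfigurationRecorderName="default")
    status = client.describe_configuration_recorder_status()["ConfigurationRecordersStatus"]
    assert status[0]["recording"] is True

record_everything()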
@@ -4,6 +4,7 @@ from __future__ import absolute_import

import functools
import inspect
+import os
import re
import six
from io import BytesIO
@@ -21,6 +22,11 @@ from .utils import (
)


+# "Mock" the AWS credentials as they can't be mocked in Botocore currently
+os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
+os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
+
+
class BaseMockAWS(object):
    nested_count = 0
@@ -718,6 +718,8 @@ def to_str(value, spec):
        return str(value)
    elif vtype == 'float':
        return str(value)
+    elif vtype == 'double':
+        return str(value)
    elif vtype == 'timestamp':
        return datetime.datetime.utcfromtimestamp(
            value).replace(tzinfo=pytz.utc).isoformat()
@@ -737,6 +739,8 @@ def from_str(value, spec):
        return int(value)
    elif vtype == 'float':
        return float(value)
+    elif vtype == 'double':
+        return float(value)
    elif vtype == 'timestamp':
        return value
    elif vtype == 'string':
@@ -280,7 +280,7 @@ def amzn_request_id(f):

        # Update request ID in XML
        try:
-            body = body.replace('{{ requestid }}', request_id)
+            body = re.sub(r'(?<=<RequestId>).*(?=<\/RequestId>)', request_id, body)
        except Exception:  # Will just ignore if it cant work on bytes (which are str's on python2)
            pass
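The switch from a literal template placeholder to a lookbehind/lookahead regex means any pre-rendered <RequestId> value gets overwritten, not just the '{{ requestid }}' token. A quick standalone illustration of the new substitution:

import re

body = "<Response><RequestId>old-id</RequestId></Response>"
request_id = "3cc8dc56-example"  # illustrative value
print(re.sub(r'(?<=<RequestId>).*(?=<\/RequestId>)', request_id, body))
# <Response><RequestId>3cc8dc56-example</RequestId></Response>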
@@ -383,7 +383,7 @@ class OpNotEqual(Op):
    def expr(self, item):
        lhs = self._lhs(item)
        rhs = self._rhs(item)
-        return lhs == rhs
+        return lhs != rhs


class OpLessThanOrEqual(Op):
@@ -5,6 +5,7 @@ import datetime
import decimal
import json
import re
+import uuid

import boto3
from moto.compat import OrderedDict
@@ -65,6 +66,8 @@ class DynamoType(object):
                return int(self.value)
            except ValueError:
                return float(self.value)
+        elif self.is_set():
+            return set(self.value)
        else:
            return self.value
@@ -292,9 +295,82 @@ class Item(BaseModel):
                    'ADD not supported for %s' % ', '.join(update_action['Value'].keys()))


+class StreamRecord(BaseModel):
+    def __init__(self, table, stream_type, event_name, old, new, seq):
+        old_a = old.to_json()['Attributes'] if old is not None else {}
+        new_a = new.to_json()['Attributes'] if new is not None else {}
+
+        rec = old if old is not None else new
+        keys = {table.hash_key_attr: rec.hash_key.to_json()}
+        if table.range_key_attr is not None:
+            keys[table.range_key_attr] = rec.range_key.to_json()
+
+        self.record = {
+            'eventID': uuid.uuid4().hex,
+            'eventName': event_name,
+            'eventSource': 'aws:dynamodb',
+            'eventVersion': '1.0',
+            'awsRegion': 'us-east-1',
+            'dynamodb': {
+                'StreamViewType': stream_type,
+                'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(),
+                'SequenceNumber': seq,
+                'SizeBytes': 1,
+                'Keys': keys
+            }
+        }
+
+        if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'):
+            self.record['dynamodb']['NewImage'] = new_a
+        if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'):
+            self.record['dynamodb']['OldImage'] = old_a
+
+        # This is a substantial overestimate but it's the easiest to do now
+        self.record['dynamodb']['SizeBytes'] = len(
+            json.dumps(self.record['dynamodb']))
+
+    def to_json(self):
+        return self.record
+
+
+class StreamShard(BaseModel):
+    def __init__(self, table):
+        self.table = table
+        self.id = 'shardId-00000001541626099285-f35f62ef'
+        self.starting_sequence_number = 1100000000017454423009
+        self.items = []
+        self.created_on = datetime.datetime.utcnow()
+
+    def to_json(self):
+        return {
+            'ShardId': self.id,
+            'SequenceNumberRange': {
+                'StartingSequenceNumber': str(self.starting_sequence_number)
+            }
+        }
+
+    def add(self, old, new):
+        t = self.table.stream_specification['StreamViewType']
+        if old is None:
+            event_name = 'INSERT'
+        elif new is None:
+            event_name = 'DELETE'
+        else:
+            event_name = 'MODIFY'
+        seq = len(self.items) + self.starting_sequence_number
+        self.items.append(
+            StreamRecord(self.table, t, event_name, old, new, seq))
+
+    def get(self, start, quantity):
+        start -= self.starting_sequence_number
+        assert start >= 0
+        end = start + quantity
+        return [i.to_json() for i in self.items[start:end]]
+
+
class Table(BaseModel):

-    def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None):
+    def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None):
        self.name = table_name
        self.attr = attr
        self.schema = schema
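StreamShard.add classifies each write as INSERT, MODIFY, or DELETE from which image is missing, and StreamRecord builds the wire-format record. A minimal end-to-end sketch, assuming the rest of this PR wires StreamSpecification through CreateTable (the responses change is not shown in this excerpt):

import boto3
from moto import mock_dynamodb2

@mock_dynamodb2
def stream_on_create():
    conn = boto3.client("dynamodb", region_name="us-east-1")
    desc = conn.create_table(
        TableName="test-streams",
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
        StreamSpecification={"StreamEnabled": True, "StreamViewType": "NEW_AND_OLD_IMAGES"},
    )["TableDescription"]
    assert desc["StreamSpecification"]["StreamEnabled"] is True
    assert "LatestStreamArn" in desc  # populated by Table.describe below

stream_on_create()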
@@ -325,10 +401,22 @@ class Table(BaseModel):
            'TimeToLiveStatus': 'DISABLED'  # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
            # 'AttributeName': 'string'  # Can contain this
        }
+        self.set_stream_specification(streams)

    def _generate_arn(self, name):
        return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name

+    def set_stream_specification(self, streams):
+        self.stream_specification = streams
+        if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')):
+            self.stream_specification['StreamEnabled'] = True
+            self.latest_stream_label = datetime.datetime.utcnow().isoformat()
+            self.stream_shard = StreamShard(self)
+        else:
+            self.stream_specification = {'StreamEnabled': False}
+            self.latest_stream_label = None
+            self.stream_shard = None
+
    def describe(self, base_key='TableDescription'):
        results = {
            base_key: {
@@ -345,6 +433,11 @@ class Table(BaseModel):
                'LocalSecondaryIndexes': [index for index in self.indexes],
            }
        }
+        if self.stream_specification and self.stream_specification['StreamEnabled']:
+            results[base_key]['StreamSpecification'] = self.stream_specification
+            if self.latest_stream_label:
+                results[base_key]['LatestStreamLabel'] = self.latest_stream_label
+                results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label
        return results

    def __len__(self):
@@ -385,10 +478,6 @@ class Table(BaseModel):
        else:
            range_value = None

-        item = Item(hash_value, self.hash_key_type, range_value,
-                    self.range_key_type, item_attrs)
-
-        if not overwrite:
        if expected is None:
            expected = {}
        lookup_range_value = range_value
@@ -399,9 +488,12 @@ class Table(BaseModel):
                lookup_range_value = range_value
            else:
                lookup_range_value = DynamoType(expected_range_value)

        current = self.get_item(hash_value, lookup_range_value)

+        item = Item(hash_value, self.hash_key_type, range_value,
+                    self.range_key_type, item_attrs)
+
+        if not overwrite:
        if current is None:
            current_attr = {}
        elif hasattr(current, 'attrs'):
@@ -419,19 +511,20 @@ class Table(BaseModel):
                elif 'Value' in val and DynamoType(val['Value']).value != current_attr[key].value:
                    raise ValueError("The conditional request failed")
                elif 'ComparisonOperator' in val:
-                    comparison_func = get_comparison_func(
-                        val['ComparisonOperator'])
                    dynamo_types = [
                        DynamoType(ele) for ele in
                        val.get("AttributeValueList", [])
                    ]
-                    for t in dynamo_types:
-                        if not comparison_func(current_attr[key].value, t.value):
-                            raise ValueError('The conditional request failed')
+                    if not current_attr[key].compare(val['ComparisonOperator'], dynamo_types):
+                        raise ValueError('The conditional request failed')
        if range_value:
            self.items[hash_value][range_value] = item
        else:
            self.items[hash_value] = item
+
+        if self.stream_shard is not None:
+            self.stream_shard.add(current, item)
+
        return item

    def __nonzero__(self):
@@ -462,9 +555,14 @@ class Table(BaseModel):
    def delete_item(self, hash_key, range_key):
        try:
            if range_key:
-                return self.items[hash_key].pop(range_key)
+                item = self.items[hash_key].pop(range_key)
            else:
-                return self.items.pop(hash_key)
+                item = self.items.pop(hash_key)
+
+            if self.stream_shard is not None:
+                self.stream_shard.add(item, None)
+
+            return item
        except KeyError:
            return None
@@ -472,6 +570,7 @@ class Table(BaseModel):
              exclusive_start_key, scan_index_forward, projection_expression,
              index_name=None, filter_expression=None, **filter_kwargs):
        results = []
+
        if index_name:
            all_indexes = (self.global_indexes or []) + (self.indexes or [])
            indexes_by_name = dict((i['IndexName'], i) for i in all_indexes)
@@ -488,24 +587,28 @@ class Table(BaseModel):
                 raise ValueError('Missing Hash Key. KeySchema: %s' %
                                  index['KeySchema'])
 
-            possible_results = []
-            for item in self.all_items():
-                if not isinstance(item, Item):
-                    continue
-                item_hash_key = item.attrs.get(index_hash_key['AttributeName'])
-                if item_hash_key and item_hash_key == hash_key:
-                    possible_results.append(item)
-        else:
-            possible_results = [item for item in list(self.all_items()) if isinstance(
-                item, Item) and item.hash_key == hash_key]
-
-        if index_name:
             try:
                 index_range_key = [key for key in index[
                     'KeySchema'] if key['KeyType'] == 'RANGE'][0]
             except IndexError:
                 index_range_key = None
 
+            possible_results = []
+            for item in self.all_items():
+                if not isinstance(item, Item):
+                    continue
+                item_hash_key = item.attrs.get(index_hash_key['AttributeName'])
+                if index_range_key is None:
+                    if item_hash_key and item_hash_key == hash_key:
+                        possible_results.append(item)
+                else:
+                    item_range_key = item.attrs.get(index_range_key['AttributeName'])
+                    if item_hash_key and item_hash_key == hash_key and item_range_key:
+                        possible_results.append(item)
+        else:
+            possible_results = [item for item in list(self.all_items()) if isinstance(
+                item, Item) and item.hash_key == hash_key]
+
         if range_comparison:
             if index_name and not index_range_key:
                 raise ValueError(
@@ -680,6 +783,13 @@ class DynamoDBBackend(BaseBackend):
         table.throughput = throughput
         return table
 
+    def update_table_streams(self, name, stream_specification):
+        table = self.tables[name]
+        if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label:
+            raise ValueError('Table already has stream enabled')
+        table.set_stream_specification(stream_specification)
+        return table
+
     def update_table_global_indexes(self, name, global_index_updates):
         table = self.tables[name]
         gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes)
@@ -840,14 +950,11 @@ class DynamoDBBackend(BaseBackend):
                 elif 'Value' in val and DynamoType(val['Value']).value != item_attr[key].value:
                     raise ValueError("The conditional request failed")
                 elif 'ComparisonOperator' in val:
-                    comparison_func = get_comparison_func(
-                        val['ComparisonOperator'])
                     dynamo_types = [
                         DynamoType(ele) for ele in
                         val.get("AttributeValueList", [])
                     ]
-                    for t in dynamo_types:
-                        if not comparison_func(item_attr[key].value, t.value):
-                            raise ValueError('The conditional request failed')
+                    if not item_attr[key].compare(val['ComparisonOperator'], dynamo_types):
+                        raise ValueError('The conditional request failed')
 
         # Update does not fail on new items, so create one

moto/dynamodb2/responses.py
@@ -31,6 +31,67 @@ def get_empty_str_error():
     ))
 
 
+def condition_expression_to_expected(condition_expression, expression_attribute_names, expression_attribute_values):
+    """
+    Limited condition expression syntax parsing.
+    Supports Global Negation ex: NOT(inner expressions).
+    Supports simple AND conditions ex: cond_a AND cond_b and cond_c.
+    Atomic expressions supported are attribute_exists(key), attribute_not_exists(key) and #key = :value.
+    """
+    expected = {}
+    if condition_expression and 'OR' not in condition_expression:
+        reverse_re = re.compile('^NOT\s*\((.*)\)$')
+        reverse_m = reverse_re.match(condition_expression.strip())
+
+        reverse = False
+        if reverse_m:
+            reverse = True
+            condition_expression = reverse_m.group(1)
+
+        cond_items = [c.strip() for c in condition_expression.split('AND')]
+        if cond_items:
+            exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
+            not_exists_re = re.compile(
+                '^attribute_not_exists\s*\((.*)\)$')
+            equals_re = re.compile('^(#?\w+)\s*=\s*(\:?\w+)')
+
+            for cond in cond_items:
+                exists_m = exists_re.match(cond)
+                not_exists_m = not_exists_re.match(cond)
+                equals_m = equals_re.match(cond)
+
+                if exists_m:
+                    attribute_name = expression_attribute_names_lookup(exists_m.group(1), expression_attribute_names)
+                    expected[attribute_name] = {'Exists': True if not reverse else False}
+                elif not_exists_m:
+                    attribute_name = expression_attribute_names_lookup(not_exists_m.group(1), expression_attribute_names)
+                    expected[attribute_name] = {'Exists': False if not reverse else True}
+                elif equals_m:
+                    attribute_name = expression_attribute_names_lookup(equals_m.group(1), expression_attribute_names)
+                    attribute_value = expression_attribute_values_lookup(equals_m.group(2), expression_attribute_values)
+                    expected[attribute_name] = {
+                        'AttributeValueList': [attribute_value],
+                        'ComparisonOperator': 'EQ' if not reverse else 'NEQ'}
+
+    return expected
+
+
+def expression_attribute_names_lookup(attribute_name, expression_attribute_names):
+    if attribute_name.startswith('#') and attribute_name in expression_attribute_names:
+        return expression_attribute_names[attribute_name]
+    else:
+        return attribute_name
+
+
+def expression_attribute_values_lookup(attribute_value, expression_attribute_values):
+    if isinstance(attribute_value, six.string_types) and \
+            attribute_value.startswith(':') and\
+            attribute_value in expression_attribute_values:
+        return expression_attribute_values[attribute_value]
+    else:
+        return attribute_value
+
+
 class DynamoHandler(BaseResponse):
 
     def get_endpoint_name(self, headers):
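Note: a worked example of what condition_expression_to_expected produces, as a sketch; the expression, name map, and value map below are invented for illustration:

    expected = condition_expression_to_expected(
        'attribute_not_exists(id) AND #t = :v',
        {'#t': 'type'},                # expression_attribute_names
        {':v': {'S': 'widget'}})       # expression_attribute_values
    # -> {'id': {'Exists': False},
    #     'type': {'AttributeValueList': [{'S': 'widget'}],
    #              'ComparisonOperator': 'EQ'}}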
@@ -95,7 +156,15 @@ class DynamoHandler(BaseResponse):
         body = self.body
         # get the table name
         table_name = body['TableName']
-        # get the throughput
+        # check billing mode and get the throughput
+        if "BillingMode" in body.keys() and body["BillingMode"] == "PAY_PER_REQUEST":
+            if "ProvisionedThroughput" in body.keys():
+                er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+                return self.error(er,
+                                  'ProvisionedThroughput cannot be specified \
+                                   when BillingMode is PAY_PER_REQUEST')
+            throughput = None
+        else:        # Provisioned (default billing mode)
             throughput = body["ProvisionedThroughput"]
         # getting the schema
         key_schema = body['KeySchema']
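Note: a minimal sketch of how the new billing-mode branch is exercised from a client, assuming the backend accepts throughput=None as the hunk implies; table and key names are invented:

    import boto3
    from moto import mock_dynamodb2

    @mock_dynamodb2
    def create_on_demand_table():
        ddb = boto3.client('dynamodb', region_name='us-east-1')
        # no ProvisionedThroughput block; supplying one together with
        # PAY_PER_REQUEST now returns the ValidationException above
        ddb.create_table(
            TableName='on_demand_table',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
            BillingMode='PAY_PER_REQUEST')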
@@ -104,13 +173,16 @@ class DynamoHandler(BaseResponse):
         # getting the indexes
         global_indexes = body.get("GlobalSecondaryIndexes", [])
         local_secondary_indexes = body.get("LocalSecondaryIndexes", [])
+        # get the stream specification
+        streams = body.get("StreamSpecification")
 
         table = self.dynamodb_backend.create_table(table_name,
                                                    schema=key_schema,
                                                    throughput=throughput,
                                                    attr=attr,
                                                    global_indexes=global_indexes,
-                                                   indexes=local_secondary_indexes)
+                                                   indexes=local_secondary_indexes,
+                                                   streams=streams)
         if table is not None:
             return dynamo_json_dump(table.describe())
         else:
@@ -163,12 +235,20 @@ class DynamoHandler(BaseResponse):
 
     def update_table(self):
         name = self.body['TableName']
+        table = self.dynamodb_backend.get_table(name)
         if 'GlobalSecondaryIndexUpdates' in self.body:
             table = self.dynamodb_backend.update_table_global_indexes(
                 name, self.body['GlobalSecondaryIndexUpdates'])
         if 'ProvisionedThroughput' in self.body:
             throughput = self.body["ProvisionedThroughput"]
             table = self.dynamodb_backend.update_table_throughput(name, throughput)
+        if 'StreamSpecification' in self.body:
+            try:
+                table = self.dynamodb_backend.update_table_streams(name, self.body['StreamSpecification'])
+            except ValueError:
+                er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException'
+                return self.error(er, 'Cannot enable stream')
+
         return dynamo_json_dump(table.describe())
 
     def describe_table(self):
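Note: a minimal sketch of driving the new StreamSpecification path in update_table; names are invented. A second call with a stream spec on the same table should hit the ResourceInUseException branch above:

    import boto3
    from moto import mock_dynamodb2

    @mock_dynamodb2
    def enable_stream():
        ddb = boto3.client('dynamodb', region_name='us-east-1')
        ddb.create_table(
            TableName='demo',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1})
        ddb.update_table(
            TableName='demo',
            StreamSpecification={'StreamEnabled': True,
                                 'StreamViewType': 'NEW_AND_OLD_IMAGES'})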
@@ -183,6 +263,11 @@ class DynamoHandler(BaseResponse):
     def put_item(self):
         name = self.body['TableName']
         item = self.body['Item']
+        return_values = self.body.get('ReturnValues', 'NONE')
+
+        if return_values not in ('ALL_OLD', 'NONE'):
+            er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+            return self.error(er, 'Return values set to invalid value')
+
         if has_empty_keys_or_values(item):
             return get_empty_str_error()
@@ -193,28 +278,24 @@ class DynamoHandler(BaseResponse):
         else:
             expected = None
 
+        if return_values == 'ALL_OLD':
+            existing_item = self.dynamodb_backend.get_item(name, item)
+            if existing_item:
+                existing_attributes = existing_item.to_json()['Attributes']
+            else:
+                existing_attributes = {}
+
         # Attempt to parse simple ConditionExpressions into an Expected
         # expression
         if not expected:
             condition_expression = self.body.get('ConditionExpression')
-            if condition_expression and 'OR' not in condition_expression:
-                cond_items = [c.strip()
-                              for c in condition_expression.split('AND')]
-
-                if cond_items:
-                    expected = {}
-                    overwrite = False
-                    exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
-                    not_exists_re = re.compile(
-                        '^attribute_not_exists\s*\((.*)\)$')
-
-                    for cond in cond_items:
-                        exists_m = exists_re.match(cond)
-                        not_exists_m = not_exists_re.match(cond)
-                        if exists_m:
-                            expected[exists_m.group(1)] = {'Exists': True}
-                        elif not_exists_m:
-                            expected[not_exists_m.group(1)] = {'Exists': False}
+            expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
+            expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
+            expected = condition_expression_to_expected(condition_expression,
+                                                        expression_attribute_names,
+                                                        expression_attribute_values)
+            if expected:
+                overwrite = False
 
         try:
             result = self.dynamodb_backend.put_item(name, item, expected, overwrite)
@@ -228,6 +309,10 @@ class DynamoHandler(BaseResponse):
                 'TableName': name,
                 'CapacityUnits': 1
             }
+            if return_values == 'ALL_OLD':
+                item_dict['Attributes'] = existing_attributes
+            else:
+                item_dict.pop('Attributes', None)
             return dynamo_json_dump(item_dict)
         else:
             er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
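Note: a sketch of the new put_item ReturnValues handling; table layout and values are invented. The second put returns the first item's attributes:

    import boto3
    from moto import mock_dynamodb2

    @mock_dynamodb2
    def put_item_all_old():
        ddb = boto3.client('dynamodb', region_name='us-east-1')
        ddb.create_table(
            TableName='demo',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1})
        ddb.put_item(TableName='demo', Item={'id': {'S': 'a'}, 'v': {'N': '1'}})
        resp = ddb.put_item(TableName='demo',
                            Item={'id': {'S': 'a'}, 'v': {'N': '2'}},
                            ReturnValues='ALL_OLD')
        return resp['Attributes']   # expected: {'id': {'S': 'a'}, 'v': {'N': '1'}}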
@@ -385,7 +470,7 @@ class DynamoHandler(BaseResponse):
             range_values = [value_alias_map[
                 range_key_expression_components[2]]]
         else:
-            hash_key_expression = key_condition_expression
+            hash_key_expression = key_condition_expression.strip('()')
             range_comparison = None
             range_values = []
 
@@ -512,7 +597,11 @@ class DynamoHandler(BaseResponse):
     def delete_item(self):
         name = self.body['TableName']
         keys = self.body['Key']
-        return_values = self.body.get('ReturnValues', '')
+        return_values = self.body.get('ReturnValues', 'NONE')
+        if return_values not in ('ALL_OLD', 'NONE'):
+            er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+            return self.error(er, 'Return values set to invalid value')
+
         table = self.dynamodb_backend.get_table(name)
         if not table:
             er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException'
@@ -527,9 +616,9 @@ class DynamoHandler(BaseResponse):
         return dynamo_json_dump(item_dict)
 
     def update_item(self):
-
         name = self.body['TableName']
         key = self.body['Key']
+        return_values = self.body.get('ReturnValues', 'NONE')
         update_expression = self.body.get('UpdateExpression')
         attribute_updates = self.body.get('AttributeUpdates')
         expression_attribute_names = self.body.get(
@@ -537,6 +626,15 @@ class DynamoHandler(BaseResponse):
         expression_attribute_values = self.body.get(
             'ExpressionAttributeValues', {})
         existing_item = self.dynamodb_backend.get_item(name, key)
+        if existing_item:
+            existing_attributes = existing_item.to_json()['Attributes']
+        else:
+            existing_attributes = {}
+
+        if return_values not in ('NONE', 'ALL_OLD', 'ALL_NEW', 'UPDATED_OLD',
+                                 'UPDATED_NEW'):
+            er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+            return self.error(er, 'Return values set to invalid value')
+
         if has_empty_keys_or_values(expression_attribute_values):
             return get_empty_str_error()
@@ -550,23 +648,11 @@ class DynamoHandler(BaseResponse):
         # expression
         if not expected:
             condition_expression = self.body.get('ConditionExpression')
-            if condition_expression and 'OR' not in condition_expression:
-                cond_items = [c.strip()
-                              for c in condition_expression.split('AND')]
-
-                if cond_items:
-                    expected = {}
-                    exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
-                    not_exists_re = re.compile(
-                        '^attribute_not_exists\s*\((.*)\)$')
-
-                    for cond in cond_items:
-                        exists_m = exists_re.match(cond)
-                        not_exists_m = not_exists_re.match(cond)
-                        if exists_m:
-                            expected[exists_m.group(1)] = {'Exists': True}
-                        elif not_exists_m:
-                            expected[not_exists_m.group(1)] = {'Exists': False}
+            expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
+            expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
+            expected = condition_expression_to_expected(condition_expression,
+                                                        expression_attribute_names,
+                                                        expression_attribute_values)
 
         # Support spaces between operators in an update expression
         # E.g. `a = b + c` -> `a=b+c`
@@ -591,8 +677,26 @@ class DynamoHandler(BaseResponse):
             'TableName': name,
             'CapacityUnits': 0.5
         }
-        if not existing_item:
+        unchanged_attributes = {
+            k for k in existing_attributes.keys()
+            if existing_attributes[k] == item_dict['Attributes'].get(k)
+        }
+        changed_attributes = set(existing_attributes.keys()).union(item_dict['Attributes'].keys()).difference(unchanged_attributes)
+
+        if return_values == 'NONE':
             item_dict['Attributes'] = {}
+        elif return_values == 'ALL_OLD':
+            item_dict['Attributes'] = existing_attributes
+        elif return_values == 'UPDATED_OLD':
+            item_dict['Attributes'] = {
+                k: v for k, v in existing_attributes.items()
+                if k in changed_attributes
+            }
+        elif return_values == 'UPDATED_NEW':
+            item_dict['Attributes'] = {
+                k: v for k, v in item_dict['Attributes'].items()
+                if k in changed_attributes
+            }
+
         return dynamo_json_dump(item_dict)
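Note: a pure-Python illustration of the changed/unchanged attribute bookkeeping above; the attribute values are invented:

    existing_attributes = {'id': {'S': 'a'}, 'count': {'N': '1'}}
    new_attributes = {'id': {'S': 'a'}, 'count': {'N': '2'}, 'extra': {'S': 'x'}}

    unchanged = {k for k in existing_attributes
                 if existing_attributes[k] == new_attributes.get(k)}   # {'id'}
    changed = set(existing_attributes) | set(new_attributes)
    changed -= unchanged                                               # {'count', 'extra'}
    # UPDATED_OLD -> {'count': {'N': '1'}}
    # UPDATED_NEW -> {'count': {'N': '2'}, 'extra': {'S': 'x'}}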
6 moto/dynamodbstreams/__init__.py (Normal file)
@@ -0,0 +1,6 @@
+from __future__ import unicode_literals
+from .models import dynamodbstreams_backends
+from ..core.models import base_decorator
+
+dynamodbstreams_backend = dynamodbstreams_backends['us-east-1']
+mock_dynamodbstreams = base_decorator(dynamodbstreams_backends)
129 moto/dynamodbstreams/models.py (Normal file)
@@ -0,0 +1,129 @@
+from __future__ import unicode_literals
+
+import os
+import json
+import boto3
+import base64
+
+from moto.core import BaseBackend, BaseModel
+from moto.dynamodb2.models import dynamodb_backends
+
+
+class ShardIterator(BaseModel):
+    def __init__(self, streams_backend, stream_shard, shard_iterator_type, sequence_number=None):
+        self.id = base64.b64encode(os.urandom(472)).decode('utf-8')
+        self.streams_backend = streams_backend
+        self.stream_shard = stream_shard
+        self.shard_iterator_type = shard_iterator_type
+        if shard_iterator_type == 'TRIM_HORIZON':
+            self.sequence_number = stream_shard.starting_sequence_number
+        elif shard_iterator_type == 'LATEST':
+            self.sequence_number = stream_shard.starting_sequence_number + len(stream_shard.items)
+        elif shard_iterator_type == 'AT_SEQUENCE_NUMBER':
+            self.sequence_number = sequence_number
+        elif shard_iterator_type == 'AFTER_SEQUENCE_NUMBER':
+            self.sequence_number = sequence_number + 1
+
+    @property
+    def arn(self):
+        return '{}/stream/{}|1|{}'.format(
+            self.stream_shard.table.table_arn,
+            self.stream_shard.table.latest_stream_label,
+            self.id)
+
+    def to_json(self):
+        return {
+            'ShardIterator': self.arn
+        }
+
+    def get(self, limit=1000):
+        items = self.stream_shard.get(self.sequence_number, limit)
+        try:
+            last_sequence_number = max(i['dynamodb']['SequenceNumber'] for i in items)
+            new_shard_iterator = ShardIterator(self.streams_backend,
+                                               self.stream_shard,
+                                               'AFTER_SEQUENCE_NUMBER',
+                                               last_sequence_number)
+        except ValueError:
+            new_shard_iterator = ShardIterator(self.streams_backend,
+                                               self.stream_shard,
+                                               'AT_SEQUENCE_NUMBER',
+                                               self.sequence_number)
+
+        self.streams_backend.shard_iterators[new_shard_iterator.arn] = new_shard_iterator
+        return {
+            'NextShardIterator': new_shard_iterator.arn,
+            'Records': items
+        }
+
+
+class DynamoDBStreamsBackend(BaseBackend):
+    def __init__(self, region):
+        self.region = region
+        self.shard_iterators = {}
+
+    def reset(self):
+        region = self.region
+        self.__dict__ = {}
+        self.__init__(region)
+
+    @property
+    def dynamodb(self):
+        return dynamodb_backends[self.region]
+
+    def _get_table_from_arn(self, arn):
+        table_name = arn.split(':', 6)[5].split('/')[1]
+        return self.dynamodb.get_table(table_name)
+
+    def describe_stream(self, arn):
+        table = self._get_table_from_arn(arn)
+        resp = {'StreamDescription': {
+            'StreamArn': arn,
+            'StreamLabel': table.latest_stream_label,
+            'StreamStatus': ('ENABLED' if table.latest_stream_label
+                             else 'DISABLED'),
+            'StreamViewType': table.stream_specification['StreamViewType'],
+            'CreationRequestDateTime': table.stream_shard.created_on.isoformat(),
+            'TableName': table.name,
+            'KeySchema': table.schema,
+            'Shards': ([table.stream_shard.to_json()] if table.stream_shard
+                       else [])
+        }}
+
+        return json.dumps(resp)
+
+    def list_streams(self, table_name=None):
+        streams = []
+        for table in self.dynamodb.tables.values():
+            if table_name is not None and table.name != table_name:
+                continue
+            if table.latest_stream_label:
+                d = table.describe(base_key='Table')
+                streams.append({
+                    'StreamArn': d['Table']['LatestStreamArn'],
+                    'TableName': d['Table']['TableName'],
+                    'StreamLabel': d['Table']['LatestStreamLabel']
+                })
+
+        return json.dumps({'Streams': streams})
+
+    def get_shard_iterator(self, arn, shard_id, shard_iterator_type, sequence_number=None):
+        table = self._get_table_from_arn(arn)
+        assert table.stream_shard.id == shard_id
+
+        shard_iterator = ShardIterator(self, table.stream_shard,
+                                       shard_iterator_type,
+                                       sequence_number)
+        self.shard_iterators[shard_iterator.arn] = shard_iterator
+
+        return json.dumps(shard_iterator.to_json())
+
+    def get_records(self, iterator_arn, limit):
+        shard_iterator = self.shard_iterators[iterator_arn]
+        return json.dumps(shard_iterator.get(limit))
+
+
+available_regions = boto3.session.Session().get_available_regions(
+    'dynamodbstreams')
+dynamodbstreams_backends = {region: DynamoDBStreamsBackend(region=region)
+                            for region in available_regions}
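Note: the four iterator types above resolve to sequence numbers as follows; a runnable illustration with assumed values:

    starting_sequence_number = 1100   # assumed shard start
    num_items = 3                     # records already in the shard
    iterators = {
        'TRIM_HORIZON': starting_sequence_number,              # replay from the oldest record
        'LATEST': starting_sequence_number + num_items,        # only records added afterwards
        'AT_SEQUENCE_NUMBER': 1101,                            # inclusive of 1101
        'AFTER_SEQUENCE_NUMBER': 1101 + 1,                     # exclusive of 1101
    }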
34 moto/dynamodbstreams/responses.py (Normal file)
@@ -0,0 +1,34 @@
+from __future__ import unicode_literals
+
+from moto.core.responses import BaseResponse
+
+from .models import dynamodbstreams_backends
+
+
+class DynamoDBStreamsHandler(BaseResponse):
+
+    @property
+    def backend(self):
+        return dynamodbstreams_backends[self.region]
+
+    def describe_stream(self):
+        arn = self._get_param('StreamArn')
+        return self.backend.describe_stream(arn)
+
+    def list_streams(self):
+        table_name = self._get_param('TableName')
+        return self.backend.list_streams(table_name)
+
+    def get_shard_iterator(self):
+        arn = self._get_param('StreamArn')
+        shard_id = self._get_param('ShardId')
+        shard_iterator_type = self._get_param('ShardIteratorType')
+        return self.backend.get_shard_iterator(arn, shard_id,
+                                               shard_iterator_type)
+
+    def get_records(self):
+        arn = self._get_param('ShardIterator')
+        limit = self._get_param('Limit')
+        if limit is None:
+            limit = 1000
+        return self.backend.get_records(arn, limit)
10 moto/dynamodbstreams/urls.py (Normal file)
@@ -0,0 +1,10 @@
+from __future__ import unicode_literals
+from .responses import DynamoDBStreamsHandler
+
+url_bases = [
+    "https?://streams.dynamodb.(.+).amazonaws.com"
+]
+
+url_paths = {
+    "{0}/$": DynamoDBStreamsHandler.dispatch,
+}
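Note: an end-to-end sketch of the new streams backend. The mock import path follows the __init__.py added above; all table and key names are invented:

    import boto3
    from moto import mock_dynamodb2
    from moto.dynamodbstreams import mock_dynamodbstreams

    @mock_dynamodb2
    @mock_dynamodbstreams
    def stream_round_trip():
        ddb = boto3.client('dynamodb', region_name='us-east-1')
        ddb.create_table(
            TableName='demo',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
            StreamSpecification={'StreamEnabled': True,
                                 'StreamViewType': 'NEW_AND_OLD_IMAGES'})
        ddb.put_item(TableName='demo', Item={'id': {'S': 'a'}})

        streams = boto3.client('dynamodbstreams', region_name='us-east-1')
        arn = streams.list_streams(TableName='demo')['Streams'][0]['StreamArn']
        shard_id = streams.describe_stream(StreamArn=arn)[
            'StreamDescription']['Shards'][0]['ShardId']
        it = streams.get_shard_iterator(StreamArn=arn, ShardId=shard_id,
                                        ShardIteratorType='TRIM_HORIZON')['ShardIterator']
        return streams.get_records(ShardIterator=it)['Records']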
72 moto/ec2/models.py (Executable file → Normal file)
@@ -134,6 +134,8 @@ def utc_date_and_time():
 
 
 def validate_resource_ids(resource_ids):
+    if not resource_ids:
+        raise MissingParameterError(parameter='resourceIdSet')
     for resource_id in resource_ids:
         if not is_valid_resource_id(resource_id):
             raise InvalidID(resource_id=resource_id)
@@ -189,7 +191,7 @@ class NetworkInterface(TaggedEC2Resource):
         self.ec2_backend = ec2_backend
         self.id = random_eni_id()
         self.device_index = device_index
-        self.private_ip_address = private_ip_address
+        self.private_ip_address = private_ip_address or random_private_ip()
         self.subnet = subnet
         self.instance = None
         self.attachment_id = None
@@ -388,6 +390,7 @@ class Instance(TaggedEC2Resource, BotoInstance):
         self.ebs_optimized = kwargs.get("ebs_optimized", False)
         self.source_dest_check = "true"
         self.launch_time = utc_date_and_time()
+        self.ami_launch_index = kwargs.get("ami_launch_index", 0)
         self.disable_api_termination = kwargs.get("disable_api_termination", False)
         self._spot_fleet_id = kwargs.get("spot_fleet_id", None)
         associate_public_ip = kwargs.get("associate_public_ip", False)
@@ -719,6 +722,7 @@ class InstanceBackend(object):
         instance_tags = tags.get('instance', {})
 
         for index in range(count):
+            kwargs["ami_launch_index"] = index
             new_instance = Instance(
                 self,
                 image_id,
@@ -1115,7 +1119,7 @@ class Ami(TaggedEC2Resource):
         elif filter_name == 'image-id':
             return self.id
         elif filter_name == 'is-public':
-            return str(self.is_public)
+            return self.is_public_string
         elif filter_name == 'state':
             return self.state
         elif filter_name == 'name':
@@ -2230,6 +2234,10 @@ class VPCPeeringConnectionStatus(object):
         self.code = code
         self.message = message
 
+    def deleted(self):
+        self.code = 'deleted'
+        self.message = 'Deleted by {deleter ID}'
+
     def initiating(self):
         self.code = 'initiating-request'
         self.message = 'Initiating Request to {accepter ID}'
@@ -2292,9 +2300,8 @@ class VPCPeeringConnectionBackend(object):
         return self.vpc_pcxs.get(vpc_pcx_id)
 
     def delete_vpc_peering_connection(self, vpc_pcx_id):
-        deleted = self.vpc_pcxs.pop(vpc_pcx_id, None)
-        if not deleted:
-            raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
+        deleted = self.get_vpc_peering_connection(vpc_pcx_id)
+        deleted._status.deleted()
         return deleted
 
     def accept_vpc_peering_connection(self, vpc_pcx_id):
@@ -2461,7 +2468,7 @@ class SubnetBackend(object):
                         default_for_az, map_public_ip_on_launch)
 
         # AWS associates a new subnet with the default Network ACL
-        self.associate_default_network_acl_with_subnet(subnet_id)
+        self.associate_default_network_acl_with_subnet(subnet_id, vpc_id)
         self.subnets[availability_zone][subnet_id] = subnet
         return subnet
 
@@ -2876,7 +2883,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource):
     def __init__(self, ec2_backend, spot_request_id, price, image_id, type,
                  valid_from, valid_until, launch_group, availability_zone_group,
                  key_name, security_groups, user_data, instance_type, placement,
-                 kernel_id, ramdisk_id, monitoring_enabled, subnet_id, spot_fleet_id,
+                 kernel_id, ramdisk_id, monitoring_enabled, subnet_id, tags, spot_fleet_id,
                  **kwargs):
         super(SpotInstanceRequest, self).__init__(**kwargs)
         ls = LaunchSpecification()
@@ -2900,6 +2907,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource):
         ls.monitored = monitoring_enabled
         ls.subnet_id = subnet_id
         self.spot_fleet_id = spot_fleet_id
+        self.tags = tags
 
         if security_groups:
             for group_name in security_groups:
@@ -2933,6 +2941,7 @@ class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource):
             security_group_names=[],
             security_group_ids=self.launch_specification.groups,
             spot_fleet_id=self.spot_fleet_id,
+            tags=self.tags,
         )
         instance = reservation.instances[0]
         return instance
@@ -2948,15 +2957,16 @@ class SpotRequestBackend(object):
                               valid_until, launch_group, availability_zone_group,
                               key_name, security_groups, user_data,
                               instance_type, placement, kernel_id, ramdisk_id,
-                              monitoring_enabled, subnet_id, spot_fleet_id=None):
+                              monitoring_enabled, subnet_id, tags=None, spot_fleet_id=None):
         requests = []
+        tags = tags or {}
         for _ in range(count):
             spot_request_id = random_spot_request_id()
             request = SpotInstanceRequest(self,
                                           spot_request_id, price, image_id, type, valid_from, valid_until,
                                           launch_group, availability_zone_group, key_name, security_groups,
                                           user_data, instance_type, placement, kernel_id, ramdisk_id,
-                                          monitoring_enabled, subnet_id, spot_fleet_id)
+                                          monitoring_enabled, subnet_id, tags, spot_fleet_id)
             self.spot_instance_requests[spot_request_id] = request
             requests.append(request)
         return requests
@@ -2976,8 +2986,8 @@ class SpotRequestBackend(object):
 
 class SpotFleetLaunchSpec(object):
     def __init__(self, ebs_optimized, group_set, iam_instance_profile, image_id,
-                 instance_type, key_name, monitoring, spot_price, subnet_id, user_data,
-                 weighted_capacity):
+                 instance_type, key_name, monitoring, spot_price, subnet_id, tag_specifications,
+                 user_data, weighted_capacity):
         self.ebs_optimized = ebs_optimized
         self.group_set = group_set
         self.iam_instance_profile = iam_instance_profile
@@ -2987,6 +2997,7 @@ class SpotFleetLaunchSpec(object):
         self.monitoring = monitoring
         self.spot_price = spot_price
         self.subnet_id = subnet_id
+        self.tag_specifications = tag_specifications
         self.user_data = user_data
         self.weighted_capacity = float(weighted_capacity)
 
@@ -3017,6 +3028,7 @@ class SpotFleetRequest(TaggedEC2Resource):
                 monitoring=spec.get('monitoring._enabled'),
                 spot_price=spec.get('spot_price', self.spot_price),
                 subnet_id=spec['subnet_id'],
+                tag_specifications=self._parse_tag_specifications(spec),
                 user_data=spec.get('user_data'),
                 weighted_capacity=spec['weighted_capacity'],
             )
@@ -3099,6 +3111,7 @@ class SpotFleetRequest(TaggedEC2Resource):
                 monitoring_enabled=launch_spec.monitoring,
                 subnet_id=launch_spec.subnet_id,
                 spot_fleet_id=self.id,
+                tags=launch_spec.tag_specifications,
             )
             self.spot_requests.extend(requests)
             self.fulfilled_capacity += added_weight
@@ -3121,6 +3134,25 @@ class SpotFleetRequest(TaggedEC2Resource):
         self.spot_requests = [req for req in self.spot_requests if req.instance.id not in instance_ids]
         self.ec2_backend.terminate_instances(instance_ids)
 
+    def _parse_tag_specifications(self, spec):
+        try:
+            tag_spec_num = max([int(key.split('.')[1]) for key in spec if key.startswith("tag_specification_set")])
+        except ValueError:  # no tag specifications
+            return {}
+
+        tag_specifications = {}
+        for si in range(1, tag_spec_num + 1):
+            resource_type = spec["tag_specification_set.{si}._resource_type".format(si=si)]
+
+            tags = [key for key in spec if key.startswith("tag_specification_set.{si}._tag".format(si=si))]
+            tag_num = max([int(key.split('.')[3]) for key in tags])
+            tag_specifications[resource_type] = dict((
+                spec["tag_specification_set.{si}._tag.{ti}._key".format(si=si, ti=ti)],
+                spec["tag_specification_set.{si}._tag.{ti}._value".format(si=si, ti=ti)],
+            ) for ti in range(1, tag_num + 1))
+
+        return tag_specifications
+
 
 class SpotFleetBackend(object):
     def __init__(self):
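Note: the flattened querystring dict this helper consumes looks roughly like the sketch below; the keys follow the format strings above, the values are invented:

    spec = {
        'tag_specification_set.1._resource_type': 'instance',
        'tag_specification_set.1._tag.1._key': 'Name',
        'tag_specification_set.1._tag.1._value': 'web',
        'tag_specification_set.1._tag.2._key': 'env',
        'tag_specification_set.1._tag.2._value': 'test',
    }
    # _parse_tag_specifications(spec)
    # -> {'instance': {'Name': 'web', 'env': 'test'}}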
@@ -3557,8 +3589,22 @@ class NetworkAclBackend(object):
         self.get_vpc(vpc_id)
         network_acl = NetworkAcl(self, network_acl_id, vpc_id, default)
         self.network_acls[network_acl_id] = network_acl
+        if default:
+            self.add_default_entries(network_acl_id)
         return network_acl
 
+    def add_default_entries(self, network_acl_id):
+        default_acl_entries = [
+            {'rule_number': 100, 'rule_action': 'allow', 'egress': 'true'},
+            {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'true'},
+            {'rule_number': 100, 'rule_action': 'allow', 'egress': 'false'},
+            {'rule_number': 32767, 'rule_action': 'deny', 'egress': 'false'}
+        ]
+        for entry in default_acl_entries:
+            self.create_network_acl_entry(network_acl_id=network_acl_id, rule_number=entry['rule_number'], protocol='-1',
+                                          rule_action=entry['rule_action'], egress=entry['egress'], cidr_block='0.0.0.0/0',
+                                          icmp_code=None, icmp_type=None, port_range_from=None, port_range_to=None)
+
     def get_all_network_acls(self, network_acl_ids=None, filters=None):
         network_acls = self.network_acls.values()
 
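Note: a sketch of the effect on a fresh VPC's default ACL; this assumes describe_network_acls and its vpc-id filter are supported by this backend:

    import boto3
    from moto import mock_ec2

    @mock_ec2
    def default_acl_entries():
        ec2 = boto3.client('ec2', region_name='us-east-1')
        vpc_id = ec2.create_vpc(CidrBlock='10.0.0.0/16')['Vpc']['VpcId']
        acl = ec2.describe_network_acls(
            Filters=[{'Name': 'vpc-id', 'Values': [vpc_id]}])['NetworkAcls'][0]
        # expect rule 100 (allow) and rule 32767 (deny), once for egress
        # and once for ingress, on the VPC's default ACL
        return acl['Entries']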
@@ -3633,9 +3679,9 @@ class NetworkAclBackend(object):
         new_acl.associations[new_assoc_id] = association
         return association
 
-    def associate_default_network_acl_with_subnet(self, subnet_id):
+    def associate_default_network_acl_with_subnet(self, subnet_id, vpc_id):
         association_id = random_network_acl_subnet_association_id()
-        acl = next(acl for acl in self.network_acls.values() if acl.default)
+        acl = next(acl for acl in self.network_acls.values() if acl.default and acl.vpc_id == vpc_id)
         acl.associations[association_id] = NetworkAclAssociation(self, association_id,
                                                                  subnet_id, acl.id)
 
moto/ec2/responses/elastic_block_store.py

@@ -150,6 +150,7 @@ CREATE_VOLUME_RESPONSE = """<CreateVolumeResponse xmlns="http://ec2.amazonaws.co
   <availabilityZone>{{ volume.zone.name }}</availabilityZone>
   <status>creating</status>
   <createTime>{{ volume.create_time}}</createTime>
+  {% if volume.get_tags() %}
   <tagSet>
     {% for tag in volume.get_tags() %}
       <item>
@@ -160,6 +161,7 @@ CREATE_VOLUME_RESPONSE = """<CreateVolumeResponse xmlns="http://ec2.amazonaws.co
       </item>
     {% endfor %}
   </tagSet>
+  {% endif %}
   <volumeType>standard</volumeType>
 </CreateVolumeResponse>"""
 
@@ -191,6 +193,7 @@ DESCRIBE_VOLUMES_RESPONSE = """<DescribeVolumesResponse xmlns="http://ec2.amazon
           </item>
         {% endif %}
       </attachmentSet>
+      {% if volume.get_tags() %}
       <tagSet>
         {% for tag in volume.get_tags() %}
           <item>
@@ -201,6 +204,7 @@ DESCRIBE_VOLUMES_RESPONSE = """<DescribeVolumesResponse xmlns="http://ec2.amazon
           </item>
         {% endfor %}
       </tagSet>
+      {% endif %}
       <volumeType>standard</volumeType>
     </item>
   {% endfor %}
moto/ec2/responses/instances.py

@@ -244,7 +244,7 @@ EC2_RUN_INSTANCES = """<RunInstancesResponse xmlns="http://ec2.amazonaws.com/doc
           <reason/>
           <keyName>{{ instance.key_name }}</keyName>
           <ebsOptimized>{{ instance.ebs_optimized }}</ebsOptimized>
-          <amiLaunchIndex>0</amiLaunchIndex>
+          <amiLaunchIndex>{{ instance.ami_launch_index }}</amiLaunchIndex>
           <instanceType>{{ instance.instance_type }}</instanceType>
           <launchTime>{{ instance.launch_time }}</launchTime>
           <placement>
@@ -384,7 +384,7 @@ EC2_DESCRIBE_INSTANCES = """<DescribeInstancesResponse xmlns="http://ec2.amazona
           <reason>{{ instance._reason }}</reason>
           <keyName>{{ instance.key_name }}</keyName>
           <ebsOptimized>{{ instance.ebs_optimized }}</ebsOptimized>
-          <amiLaunchIndex>0</amiLaunchIndex>
+          <amiLaunchIndex>{{ instance.ami_launch_index }}</amiLaunchIndex>
           <productCodes/>
           <instanceType>{{ instance.instance_type }}</instanceType>
           <launchTime>{{ instance.launch_time }}</launchTime>
@@ -450,6 +450,7 @@ EC2_DESCRIBE_INSTANCES = """<DescribeInstancesResponse xmlns="http://ec2.amazona
           </blockDeviceMapping>
           <virtualizationType>{{ instance.virtualization_type }}</virtualizationType>
           <clientToken>ABCDE1234567890123</clientToken>
+          {% if instance.get_tags() %}
           <tagSet>
             {% for tag in instance.get_tags() %}
               <item>
@@ -460,6 +461,7 @@ EC2_DESCRIBE_INSTANCES = """<DescribeInstancesResponse xmlns="http://ec2.amazona
               </item>
             {% endfor %}
           </tagSet>
+          {% endif %}
           <hypervisor>xen</hypervisor>
           <networkInterfaceSet>
             {% for nic in instance.nics.values() %}
moto/ec2/responses/spot_fleets.py

@@ -107,6 +107,21 @@ DESCRIBE_SPOT_FLEET_TEMPLATE = """<DescribeSpotFleetRequestsResponse xmlns="http
               </item>
             {% endfor %}
           </groupSet>
+          <tagSpecificationSet>
+            {% for resource_type in launch_spec.tag_specifications %}
+              <item>
+                <resourceType>{{ resource_type }}</resourceType>
+                <tag>
+                  {% for key, value in launch_spec.tag_specifications[resource_type].items() %}
+                    <item>
+                      <key>{{ key }}</key>
+                      <value>{{ value }}</value>
+                    </item>
+                  {% endfor %}
+                </tag>
+              </item>
+            {% endfor %}
+          </tagSpecificationSet>
         </item>
       {% endfor %}
     </launchSpecifications>
moto/ecr/urls.py

@@ -3,6 +3,7 @@ from .responses import ECRResponse
 
 url_bases = [
     "https?://ecr.(.+).amazonaws.com",
+    "https?://api.ecr.(.+).amazonaws.com",
 ]
 
 url_paths = {
moto/ecs/models.py

@@ -94,6 +94,12 @@ class Cluster(BaseObject):
             # no-op when nothing changed between old and new resources
             return original_resource
 
+    def get_cfn_attribute(self, attribute_name):
+        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
+        if attribute_name == 'Arn':
+            return self.arn
+        raise UnformattedGetAttTemplateException()
+
 
 class TaskDefinition(BaseObject):
 
@@ -271,6 +277,12 @@ class Service(BaseObject):
         else:
             return ecs_backend.update_service(cluster_name, service_name, task_definition, desired_count)
 
+    def get_cfn_attribute(self, attribute_name):
+        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
+        if attribute_name == 'Name':
+            return self.name
+        raise UnformattedGetAttTemplateException()
+
 
 class ContainerInstance(BaseObject):
 
@@ -358,6 +370,20 @@ class ContainerInstance(BaseObject):
         return formatted_attr
 
 
+class ClusterFailure(BaseObject):
+    def __init__(self, reason, cluster_name):
+        self.reason = reason
+        self.arn = "arn:aws:ecs:us-east-1:012345678910:cluster/{0}".format(
+            cluster_name)
+
+    @property
+    def response_object(self):
+        response_object = self.gen_response_object()
+        response_object['reason'] = self.reason
+        response_object['arn'] = self.arn
+        return response_object
+
+
 class ContainerInstanceFailure(BaseObject):
 
     def __init__(self, reason, container_instance_id):
@@ -419,6 +445,7 @@ class EC2ContainerServiceBackend(BaseBackend):
 
     def describe_clusters(self, list_clusters_name=None):
         list_clusters = []
+        failures = []
         if list_clusters_name is None:
             if 'default' in self.clusters:
                 list_clusters.append(self.clusters['default'].response_object)
@@ -429,9 +456,8 @@ class EC2ContainerServiceBackend(BaseBackend):
                     list_clusters.append(
                         self.clusters[cluster_name].response_object)
                 else:
-                    raise Exception(
-                        "{0} is not a cluster".format(cluster_name))
-        return list_clusters
+                    failures.append(ClusterFailure('MISSING', cluster_name))
+        return list_clusters, failures
 
     def delete_cluster(self, cluster_str):
         cluster_name = cluster_str.split('/')[-1]
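Note: a sketch of the new failure path; the response shape follows ClusterFailure above, and the cluster names are invented:

    import boto3
    from moto import mock_ecs

    @mock_ecs
    def missing_cluster_failure():
        ecs = boto3.client('ecs', region_name='us-east-1')
        ecs.create_cluster(clusterName='default')
        resp = ecs.describe_clusters(clusters=['default', 'missing'])
        # resp['failures'] -> [{'reason': 'MISSING',
        #     'arn': 'arn:aws:ecs:us-east-1:012345678910:cluster/missing'}]
        return resp['failures']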
@@ -673,12 +699,15 @@ class EC2ContainerServiceBackend(BaseBackend):
 
         return service
 
-    def list_services(self, cluster_str):
+    def list_services(self, cluster_str, scheduling_strategy=None):
         cluster_name = cluster_str.split('/')[-1]
         service_arns = []
         for key, value in self.services.items():
             if cluster_name + ':' in key:
-                service_arns.append(self.services[key].arn)
+                service = self.services[key]
+                if scheduling_strategy is None or service.scheduling_strategy == scheduling_strategy:
+                    service_arns.append(service.arn)
+
         return sorted(service_arns)
 
     def describe_services(self, cluster_str, service_names_or_arns):
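Note: a sketch of filtering by scheduling strategy; this assumes the service model carries a scheduling_strategy attribute, as the comparison above implies:

    import boto3
    from moto import mock_ecs

    @mock_ecs
    def replica_services():
        ecs = boto3.client('ecs', region_name='us-east-1')
        ecs.create_cluster(clusterName='default')
        return ecs.list_services(cluster='default',
                                 schedulingStrategy='REPLICA')['serviceArns']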
@@ -769,6 +798,8 @@ class EC2ContainerServiceBackend(BaseBackend):
                 Container instances status should be one of [ACTIVE,DRAINING]")
         failures = []
         container_instance_objects = []
+        list_container_instance_ids = [x.split('/')[-1]
+                                       for x in list_container_instance_ids]
         for container_instance_id in list_container_instance_ids:
             container_instance = self.container_instances[cluster_name].get(container_instance_id, None)
             if container_instance is not None:
moto/ecs/responses.py

@@ -45,10 +45,10 @@ class EC2ContainerServiceResponse(BaseResponse):
 
     def describe_clusters(self):
         list_clusters_name = self._get_param('clusters')
-        clusters = self.ecs_backend.describe_clusters(list_clusters_name)
+        clusters, failures = self.ecs_backend.describe_clusters(list_clusters_name)
         return json.dumps({
             'clusters': clusters,
-            'failures': []
+            'failures': [cluster.response_object for cluster in failures]
         })
 
     def delete_cluster(self):
@@ -163,7 +163,8 @@ class EC2ContainerServiceResponse(BaseResponse):
 
     def list_services(self):
         cluster_str = self._get_param('cluster')
-        service_arns = self.ecs_backend.list_services(cluster_str)
+        scheduling_strategy = self._get_param('schedulingStrategy')
+        service_arns = self.ecs_backend.list_services(cluster_str, scheduling_strategy)
         return json.dumps({
             'serviceArns': service_arns
             # ,
moto/emr/responses.py

@@ -613,13 +613,11 @@ DESCRIBE_STEP_TEMPLATE = """<DescribeStepResponse xmlns="http://elasticmapreduce
       <Id>{{ step.id }}</Id>
       <Name>{{ step.name | escape }}</Name>
       <Status>
-<!-- does not exist for botocore 1.4.28
         <FailureDetails>
           <Reason/>
           <Message/>
          <LogFile/>
         </FailureDetails>
--->
         <State>{{ step.state }}</State>
         <StateChangeReason>{{ step.state_change_reason }}</StateChangeReason>
         <Timeline>
@ -24,3 +24,56 @@ class IAMReportNotPresentException(RESTError):
|
|||||||
def __init__(self, message):
|
def __init__(self, message):
|
||||||
super(IAMReportNotPresentException, self).__init__(
|
super(IAMReportNotPresentException, self).__init__(
|
||||||
"ReportNotPresent", message)
|
"ReportNotPresent", message)
|
||||||
|
|
||||||
|
|
||||||
|
class MalformedCertificate(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self, cert):
|
||||||
|
super(MalformedCertificate, self).__init__(
|
||||||
|
'MalformedCertificate', 'Certificate {cert} is malformed'.format(cert=cert))
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicateTags(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
super(DuplicateTags, self).__init__(
|
||||||
|
'InvalidInput', 'Duplicate tag keys found. Please note that Tag keys are case insensitive.')
|
||||||
|
|
||||||
|
|
||||||
|
class TagKeyTooBig(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self, tag, param='tags.X.member.key'):
|
||||||
|
super(TagKeyTooBig, self).__init__(
|
||||||
|
'ValidationError', "1 validation error detected: Value '{}' at '{}' failed to satisfy "
|
||||||
|
"constraint: Member must have length less than or equal to 128.".format(tag, param))
|
||||||
|
|
||||||
|
|
||||||
|
class TagValueTooBig(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self, tag):
|
||||||
|
super(TagValueTooBig, self).__init__(
|
||||||
|
'ValidationError', "1 validation error detected: Value '{}' at 'tags.X.member.value' failed to satisfy "
|
||||||
|
"constraint: Member must have length less than or equal to 256.".format(tag))
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidTagCharacters(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self, tag, param='tags.X.member.key'):
|
||||||
|
message = "1 validation error detected: Value '{}' at '{}' failed to satisfy ".format(tag, param)
|
||||||
|
message += "constraint: Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+"
|
||||||
|
|
||||||
|
super(InvalidTagCharacters, self).__init__('ValidationError', message)
|
||||||
|
|
||||||
|
|
||||||
|
class TooManyTags(RESTError):
|
||||||
|
code = 400
|
||||||
|
|
||||||
|
def __init__(self, tags, param='tags'):
|
||||||
|
super(TooManyTags, self).__init__(
|
||||||
|
'ValidationError', "1 validation error detected: Value '{}' at '{}' failed to satisfy "
|
||||||
|
"constraint: Member must have length less than or equal to 50.".format(tags, param))
|
||||||
|
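Note: each class maps onto the wire-level error code given as the first argument to RESTError, so botocore surfaces all of them as ClientError. A rough sketch of the 128-character key limit tripping TagKeyTooBig, assuming moto's mock_iam decorator; the role name is hypothetical:

    import boto3
    from botocore.exceptions import ClientError
    from moto import mock_iam

    @mock_iam
    def oversized_tag_key():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_role(RoleName='demo', AssumeRolePolicyDocument='{}')
        try:
            iam.tag_role(RoleName='demo', Tags=[{'Key': 'k' * 129, 'Value': 'v'}])
        except ClientError as err:
            # TagKeyTooBig is serialized under the 'ValidationError' code
            assert err.response['Error']['Code'] == 'ValidationError'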
moto/iam/models.py
@@ -1,14 +1,20 @@
 from __future__ import unicode_literals
 import base64
+import sys
 from datetime import datetime
 import json
+import re
+
+from cryptography import x509
+from cryptography.hazmat.backends import default_backend
 
 import pytz
 from moto.core import BaseBackend, BaseModel
 from moto.core.utils import iso_8601_datetime_without_milliseconds
 
 from .aws_managed_policies import aws_managed_policies_data
-from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException
+from .exceptions import IAMNotFoundException, IAMConflictException, IAMReportNotPresentException, MalformedCertificate, \
+    DuplicateTags, TagKeyTooBig, InvalidTagCharacters, TooManyTags, TagValueTooBig
 from .utils import random_access_key, random_alphanumeric, random_resource_id, random_policy_id
 
 ACCOUNT_ID = 123456789012
@@ -28,7 +34,6 @@ class MFADevice(object):
 
 
 class Policy(BaseModel):
-
     is_attachable = False
 
     def __init__(self,
@@ -43,13 +48,29 @@ class Policy(BaseModel):
         self.description = description or ''
         self.id = random_policy_id()
         self.path = path or '/'
-        self.default_version_id = default_version_id or 'v1'
+
+        if default_version_id:
+            self.default_version_id = default_version_id
+            self.next_version_num = int(default_version_id.lstrip('v')) + 1
+        else:
+            self.default_version_id = 'v1'
+            self.next_version_num = 2
         self.versions = [PolicyVersion(self.arn, document, True)]
 
         self.create_datetime = datetime.now(pytz.utc)
         self.update_datetime = datetime.now(pytz.utc)
 
 
+class SAMLProvider(BaseModel):
+    def __init__(self, name, saml_metadata_document=None):
+        self.name = name
+        self.saml_metadata_document = saml_metadata_document
+
+    @property
+    def arn(self):
+        return "arn:aws:iam::{0}:saml-provider/{1}".format(ACCOUNT_ID, self.name)
+
+
 class PolicyVersion(object):
 
     def __init__(self,
@@ -114,9 +135,12 @@ class Role(BaseModel):
         self.id = role_id
         self.name = name
         self.assume_role_policy_document = assume_role_policy_document
-        self.path = path
+        self.path = path or '/'
         self.policies = {}
         self.managed_policies = {}
+        self.create_date = datetime.now(pytz.utc)
+        self.tags = {}
+        self.description = ""
 
     @classmethod
     def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@@ -160,14 +184,18 @@ class Role(BaseModel):
             raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"')
         raise UnformattedGetAttTemplateException()
 
+    def get_tags(self):
+        return [self.tags[tag] for tag in self.tags]
+
 
 class InstanceProfile(BaseModel):
 
     def __init__(self, instance_profile_id, name, path, roles):
         self.id = instance_profile_id
         self.name = name
-        self.path = path
+        self.path = path or '/'
         self.roles = roles if roles else []
+        self.create_date = datetime.now(pytz.utc)
 
     @classmethod
     def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
@@ -191,7 +219,7 @@ class InstanceProfile(BaseModel):
     def get_cfn_attribute(self, attribute_name):
         from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
         if attribute_name == 'Arn':
-            raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"')
+            return self.arn
         raise UnformattedGetAttTemplateException()
 
 
@@ -213,6 +241,16 @@ class Certificate(BaseModel):
         return "arn:aws:iam::{0}:server-certificate{1}{2}".format(ACCOUNT_ID, self.path, self.cert_name)
 
 
+class SigningCertificate(BaseModel):
+
+    def __init__(self, id, user_name, body):
+        self.id = id
+        self.user_name = user_name
+        self.body = body
+        self.upload_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%d-%H-%M-%S")
+        self.status = 'Active'
+
+
 class AccessKey(BaseModel):
 
     def __init__(self, user_name):
@@ -224,6 +262,10 @@ class AccessKey(BaseModel):
             datetime.utcnow(),
             "%Y-%m-%dT%H:%M:%SZ"
         )
+        self.last_used = datetime.strftime(
+            datetime.utcnow(),
+            "%Y-%m-%dT%H:%M:%SZ"
+        )
 
     def get_cfn_attribute(self, attribute_name):
         from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@@ -297,6 +339,7 @@ class User(BaseModel):
         self.access_keys = []
         self.password = None
         self.password_reset_required = False
+        self.signing_certificates = {}
 
     @property
     def arn(self):
@@ -352,21 +395,20 @@ class User(BaseModel):
         return self.access_keys
 
     def delete_access_key(self, access_key_id):
-        for key in self.access_keys:
-            if key.access_key_id == access_key_id:
-                self.access_keys.remove(key)
-                break
-        else:
-            raise IAMNotFoundException(
-                "Key {0} not found".format(access_key_id))
+        key = self.get_access_key_by_id(access_key_id)
+        self.access_keys.remove(key)
 
     def update_access_key(self, access_key_id, status):
+        key = self.get_access_key_by_id(access_key_id)
+        key.status = status
+
+    def get_access_key_by_id(self, access_key_id):
         for key in self.access_keys:
             if key.access_key_id == access_key_id:
-                key.status = status
-                break
+                return key
         else:
-            raise IAMNotFoundException("The Access Key with id {0} cannot be found".format(access_key_id))
+            raise IAMNotFoundException(
+                "The Access Key with id {0} cannot be found".format(access_key_id))
 
     def get_cfn_attribute(self, attribute_name):
         from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
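Note: with the lookup factored into get_access_key_by_id, delete and update now share one not-found behavior. A quick sketch of driving both paths through boto3, assuming mock_iam; the user name is hypothetical:

    import boto3
    from moto import mock_iam

    @mock_iam
    def deactivate_then_delete():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_user(UserName='alice')
        key_id = iam.create_access_key(UserName='alice')['AccessKey']['AccessKeyId']
        iam.update_access_key(UserName='alice', AccessKeyId=key_id, Status='Inactive')
        iam.delete_access_key(UserName='alice', AccessKeyId=key_id)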
@@ -427,6 +469,7 @@ class IAMBackend(BaseBackend):
         self.credential_report = None
         self.managed_policies = self._init_managed_policies()
         self.account_aliases = []
+        self.saml_providers = {}
         super(IAMBackend, self).__init__()
 
     def _init_managed_policies(self):
@@ -437,6 +480,16 @@ class IAMBackend(BaseBackend):
         policy = arns[policy_arn]
         policy.attach_to(self.get_role(role_name))
 
+    def update_role_description(self, role_name, role_description):
+        role = self.get_role(role_name)
+        role.description = role_description
+        return role
+
+    def update_role(self, role_name, role_description):
+        role = self.get_role(role_name)
+        role.description = role_description
+        return role
+
     def detach_role_policy(self, policy_arn, role_name):
         arns = dict((p.arn, p) for p in self.managed_policies.values())
         try:
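Note: UpdateRole and UpdateRoleDescription are deliberately identical in the backend; only their response templates differ. A sketch, assuming mock_iam and a hypothetical role:

    import boto3
    from moto import mock_iam

    @mock_iam
    def set_description():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_role(RoleName='demo', AssumeRolePolicyDocument='{}')
        iam.update_role_description(RoleName='demo', Description='mocked role')
        iam.update_role(RoleName='demo', Description='mocked role')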
@@ -583,13 +636,94 @@ class IAMBackend(BaseBackend):
         role = self.get_role(role_name)
         return role.policies.keys()
 
+    def _validate_tag_key(self, tag_key, exception_param='tags.X.member.key'):
+        """Validates the tag key.
+
+        :param all_tags: Dict to check if there is a duplicate tag.
+        :param tag_key: The tag key to check against.
+        :param exception_param: The exception parameter to send over to help format the message. This is to reflect
+                                the difference between the tag and untag APIs.
+        :return:
+        """
+        # Validate that the key length is correct:
+        if len(tag_key) > 128:
+            raise TagKeyTooBig(tag_key, param=exception_param)
+
+        # Validate that the tag key fits the proper Regex:
+        # [\w\s_.:/=+\-@]+ SHOULD be the same as the Java regex on the AWS documentation: [\p{L}\p{Z}\p{N}_.:/=+\-@]+
+        match = re.findall(r'[\w\s_.:/=+\-@]+', tag_key)
+        # Kudos if you can come up with a better way of doing a global search :)
+        if not len(match) or len(match[0]) < len(tag_key):
+            raise InvalidTagCharacters(tag_key, param=exception_param)
+
+    def _check_tag_duplicate(self, all_tags, tag_key):
+        """Validates that a tag key is not a duplicate
+
+        :param all_tags: Dict to check if there is a duplicate tag.
+        :param tag_key: The tag key to check against.
+        :return:
+        """
+        if tag_key in all_tags:
+            raise DuplicateTags()
+
+    def list_role_tags(self, role_name, marker, max_items=100):
+        role = self.get_role(role_name)
+
+        max_items = int(max_items)
+        tag_index = sorted(role.tags)
+        start_idx = int(marker) if marker else 0
+
+        tag_index = tag_index[start_idx:start_idx + max_items]
+
+        if len(role.tags) <= (start_idx + max_items):
+            marker = None
+        else:
+            marker = str(start_idx + max_items)
+
+        # Make the tag list of dict's:
+        tags = [role.tags[tag] for tag in tag_index]
+
+        return tags, marker
+
+    def tag_role(self, role_name, tags):
+        if len(tags) > 50:
+            raise TooManyTags(tags)
+
+        role = self.get_role(role_name)
+
+        tag_keys = {}
+        for tag in tags:
+            # Need to index by the lowercase tag key since the keys are case insensitive, but their case is retained.
+            ref_key = tag['Key'].lower()
+            self._check_tag_duplicate(tag_keys, ref_key)
+            self._validate_tag_key(tag['Key'])
+            if len(tag['Value']) > 256:
+                raise TagValueTooBig(tag['Value'])
+
+            tag_keys[ref_key] = tag
+
+        role.tags.update(tag_keys)
+
+    def untag_role(self, role_name, tag_keys):
+        if len(tag_keys) > 50:
+            raise TooManyTags(tag_keys, param='tagKeys')
+
+        role = self.get_role(role_name)
+
+        for key in tag_keys:
+            ref_key = key.lower()
+            self._validate_tag_key(key, exception_param='tagKeys')
+
+            role.tags.pop(ref_key, None)
+
     def create_policy_version(self, policy_arn, policy_document, set_as_default):
         policy = self.get_policy(policy_arn)
         if not policy:
             raise IAMNotFoundException("Policy not found")
         version = PolicyVersion(policy_arn, policy_document, set_as_default)
         policy.versions.append(version)
-        version.version_id = 'v{0}'.format(len(policy.versions))
+        version.version_id = 'v{0}'.format(policy.next_version_num)
+        policy.next_version_num += 1
         if set_as_default:
             policy.default_version_id = version.version_id
         return version
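Note: tags are stored keyed by their lowercased name (AWS treats keys as case-insensitive) while the originally supplied dict, casing included, is kept as the value; listing therefore echoes the original casing and untag matches any casing. A sketch, assuming mock_iam and hypothetical names:

    import boto3
    from moto import mock_iam

    @mock_iam
    def tag_round_trip():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_role(RoleName='demo', AssumeRolePolicyDocument='{}')
        iam.tag_role(RoleName='demo', Tags=[{'Key': 'Env', 'Value': 'test'}])
        assert iam.list_role_tags(RoleName='demo')['Tags'][0]['Key'] == 'Env'
        iam.untag_role(RoleName='demo', TagKeys=['env'])  # matches 'Env' case-insensitively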
@@ -765,6 +899,70 @@ class IAMBackend(BaseBackend):
 
         return users
 
+    def update_user(self, user_name, new_path=None, new_user_name=None):
+        try:
+            user = self.users[user_name]
+        except KeyError:
+            raise IAMNotFoundException("User {0} not found".format(user_name))
+
+        if new_path:
+            user.path = new_path
+        if new_user_name:
+            user.name = new_user_name
+            self.users[new_user_name] = self.users.pop(user_name)
+
+    def list_roles(self, path_prefix, marker, max_items):
+        roles = None
+        try:
+            roles = self.roles.values()
+        except KeyError:
+            raise IAMNotFoundException(
+                "Users {0}, {1}, {2} not found".format(path_prefix, marker, max_items))
+
+        return roles
+
+    def upload_signing_certificate(self, user_name, body):
+        user = self.get_user(user_name)
+        cert_id = random_resource_id(size=32)
+
+        # Validate the signing cert:
+        try:
+            if sys.version_info < (3, 0):
+                data = bytes(body)
+            else:
+                data = bytes(body, 'utf8')
+
+            x509.load_pem_x509_certificate(data, default_backend())
+
+        except Exception:
+            raise MalformedCertificate(body)
+
+        user.signing_certificates[cert_id] = SigningCertificate(cert_id, user_name, body)
+
+        return user.signing_certificates[cert_id]
+
+    def delete_signing_certificate(self, user_name, cert_id):
+        user = self.get_user(user_name)
+
+        try:
+            del user.signing_certificates[cert_id]
+        except KeyError:
+            raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id))
+
+    def list_signing_certificates(self, user_name):
+        user = self.get_user(user_name)
+
+        return list(user.signing_certificates.values())
+
+    def update_signing_certificate(self, user_name, cert_id, status):
+        user = self.get_user(user_name)
+
+        try:
+            user.signing_certificates[cert_id].status = status
+
+        except KeyError:
+            raise IAMNotFoundException("The Certificate with id {id} cannot be found.".format(id=cert_id))
+
     def create_login_profile(self, user_name, password):
         # This does not currently deal with PasswordPolicyViolation.
         user = self.get_user(user_name)
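Note: the certificate body is parsed with cryptography's PEM loader purely as a validity check; anything unparsable is rejected as MalformedCertificate. A sketch of the failure path, assuming mock_iam and a hypothetical user:

    import boto3
    from botocore.exceptions import ClientError
    from moto import mock_iam

    @mock_iam
    def reject_garbage_cert():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_user(UserName='alice')
        try:
            iam.upload_signing_certificate(UserName='alice', CertificateBody='not-a-pem')
        except ClientError as err:
            assert err.response['Error']['Code'] == 'MalformedCertificate'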
@@ -838,6 +1036,24 @@ class IAMBackend(BaseBackend):
         user = self.get_user(user_name)
         user.update_access_key(access_key_id, status)
 
+    def get_access_key_last_used(self, access_key_id):
+        access_keys_list = self.get_all_access_keys_for_all_users()
+        for key in access_keys_list:
+            if key.access_key_id == access_key_id:
+                return {
+                    'user_name': key.user_name,
+                    'last_used': key.last_used
+                }
+        else:
+            raise IAMNotFoundException(
+                "The Access Key with id {0} cannot be found".format(access_key_id))
+
+    def get_all_access_keys_for_all_users(self):
+        access_keys_list = []
+        for user_name in self.users:
+            access_keys_list += self.get_all_access_keys(user_name)
+        return access_keys_list
+
     def get_all_access_keys(self, user_name, marker=None, max_items=None):
         user = self.get_user(user_name)
         keys = user.get_all_access_keys()
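Note: since moto never actually signs requests with the key, last_used is simply stamped at key-creation time. A sketch of the happy path, assuming mock_iam:

    import boto3
    from moto import mock_iam

    @mock_iam
    def last_used_lookup():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_user(UserName='alice')
        key_id = iam.create_access_key(UserName='alice')['AccessKey']['AccessKeyId']
        resp = iam.get_access_key_last_used(AccessKeyId=key_id)
        return resp['UserName'], resp['AccessKeyLastUsed']['LastUsedDate']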
@@ -937,5 +1153,33 @@ class IAMBackend(BaseBackend):
             'managed_policies': returned_policies
         }
 
+    def create_saml_provider(self, name, saml_metadata_document):
+        saml_provider = SAMLProvider(name, saml_metadata_document)
+        self.saml_providers[name] = saml_provider
+        return saml_provider
+
+    def update_saml_provider(self, saml_provider_arn, saml_metadata_document):
+        saml_provider = self.get_saml_provider(saml_provider_arn)
+        saml_provider.saml_metadata_document = saml_metadata_document
+        return saml_provider
+
+    def delete_saml_provider(self, saml_provider_arn):
+        try:
+            for saml_provider in list(self.list_saml_providers()):
+                if saml_provider.arn == saml_provider_arn:
+                    del self.saml_providers[saml_provider.name]
+        except KeyError:
+            raise IAMNotFoundException(
+                "SAMLProvider {0} not found".format(saml_provider_arn))
+
+    def list_saml_providers(self):
+        return self.saml_providers.values()
+
+    def get_saml_provider(self, saml_provider_arn):
+        for saml_provider in self.list_saml_providers():
+            if saml_provider.arn == saml_provider_arn:
+                return saml_provider
+        raise IAMNotFoundException("SamlProvider {0} not found".format(saml_provider_arn))
+
 
 iam_backend = IAMBackend()
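Note: providers live in a plain dict keyed by name, and ARN lookups scan the values; moto does not validate the metadata document's length or contents here, unlike real IAM. A lifecycle sketch, assuming mock_iam and hypothetical names:

    import boto3
    from moto import mock_iam

    @mock_iam
    def saml_lifecycle():
        iam = boto3.client('iam', region_name='us-east-1')
        arn = iam.create_saml_provider(
            Name='corp-idp', SAMLMetadataDocument='<EntityDescriptor/>')['SAMLProviderArn']
        iam.update_saml_provider(SAMLProviderArn=arn, SAMLMetadataDocument='<EntityDescriptor/>')
        iam.delete_saml_provider(SAMLProviderArn=arn)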
moto/iam/responses.py
@@ -107,6 +107,69 @@ class IamResponse(BaseResponse):
         template = self.response_template(LIST_POLICIES_TEMPLATE)
         return template.render(policies=policies, marker=marker)
 
+    def list_entities_for_policy(self):
+        policy_arn = self._get_param('PolicyArn')
+
+        # Options 'User'|'Role'|'Group'|'LocalManagedPolicy'|'AWSManagedPolicy
+        entity = self._get_param('EntityFilter')
+        path_prefix = self._get_param('PathPrefix')
+        # policy_usage_filter = self._get_param('PolicyUsageFilter')
+        marker = self._get_param('Marker')
+        max_items = self._get_param('MaxItems')
+
+        entity_roles = []
+        entity_groups = []
+        entity_users = []
+
+        if entity == 'User':
+            users = iam_backend.list_users(path_prefix, marker, max_items)
+            if users:
+                for user in users:
+                    for p in user.managed_policies:
+                        if p == policy_arn:
+                            entity_users.append(user.name)
+
+        elif entity == 'Role':
+            roles = iam_backend.list_roles(path_prefix, marker, max_items)
+            if roles:
+                for role in roles:
+                    for p in role.managed_policies:
+                        if p == policy_arn:
+                            entity_roles.append(role.name)
+
+        elif entity == 'Group':
+            groups = iam_backend.list_groups()
+            if groups:
+                for group in groups:
+                    for p in group.managed_policies:
+                        if p == policy_arn:
+                            entity_groups.append(group.name)
+
+        elif entity == 'LocalManagedPolicy' or entity == 'AWSManagedPolicy':
+            users = iam_backend.list_users(path_prefix, marker, max_items)
+            if users:
+                for user in users:
+                    for p in user.managed_policies:
+                        if p == policy_arn:
+                            entity_users.append(user.name)
+
+            roles = iam_backend.list_roles(path_prefix, marker, max_items)
+            if roles:
+                for role in roles:
+                    for p in role.managed_policies:
+                        if p == policy_arn:
+                            entity_roles.append(role.name)
+
+            groups = iam_backend.list_groups()
+            if groups:
+                for group in groups:
+                    for p in group.managed_policies:
+                        if p == policy_arn:
+                            entity_groups.append(group.name)
+
+        template = self.response_template(LIST_ENTITIES_FOR_POLICY_TEMPLATE)
+        return template.render(roles=entity_roles, users=entity_users, groups=entity_groups)
+
     def create_role(self):
         role_name = self._get_param('RoleName')
         path = self._get_param('Path')
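Note: the filter only inspects managed-policy attachments, so a role attached to the policy shows up under PolicyRoles. A sketch, assuming mock_iam and hypothetical names:

    import boto3
    from moto import mock_iam

    @mock_iam
    def entities_for_policy():
        iam = boto3.client('iam', region_name='us-east-1')
        doc = '{"Version": "2012-10-17", "Statement": []}'
        arn = iam.create_policy(PolicyName='demo-policy', PolicyDocument=doc)['Policy']['Arn']
        iam.create_role(RoleName='demo', AssumeRolePolicyDocument='{}')
        iam.attach_role_policy(RoleName='demo', PolicyArn=arn)
        return iam.list_entities_for_policy(PolicyArn=arn, EntityFilter='Role')['PolicyRoles']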
@@ -169,6 +232,20 @@ class IamResponse(BaseResponse):
         template = self.response_template(GENERIC_EMPTY_TEMPLATE)
         return template.render(name="UpdateAssumeRolePolicyResponse")
 
+    def update_role_description(self):
+        role_name = self._get_param('RoleName')
+        description = self._get_param('Description')
+        role = iam_backend.update_role_description(role_name, description)
+        template = self.response_template(UPDATE_ROLE_DESCRIPTION_TEMPLATE)
+        return template.render(role=role)
+
+    def update_role(self):
+        role_name = self._get_param('RoleName')
+        description = self._get_param('Description')
+        role = iam_backend.update_role(role_name, description)
+        template = self.response_template(UPDATE_ROLE_TEMPLATE)
+        return template.render(role=role)
+
     def create_policy_version(self):
         policy_arn = self._get_param('PolicyArn')
         policy_document = self._get_param('PolicyDocument')
@@ -201,7 +278,7 @@ class IamResponse(BaseResponse):
 
     def create_instance_profile(self):
         profile_name = self._get_param('InstanceProfileName')
-        path = self._get_param('Path')
+        path = self._get_param('Path', '/')
 
         profile = iam_backend.create_instance_profile(
             profile_name, path, role_ids=[])
@@ -363,6 +440,18 @@ class IamResponse(BaseResponse):
         template = self.response_template(LIST_USERS_TEMPLATE)
         return template.render(action='List', users=users)
 
+    def update_user(self):
+        user_name = self._get_param('UserName')
+        new_path = self._get_param('NewPath')
+        new_user_name = self._get_param('NewUserName')
+        iam_backend.update_user(user_name, new_path, new_user_name)
+        if new_user_name:
+            user = iam_backend.get_user(new_user_name)
+        else:
+            user = iam_backend.get_user(user_name)
+        template = self.response_template(USER_TEMPLATE)
+        return template.render(action='Update', user=user)
+
     def create_login_profile(self):
         user_name = self._get_param('UserName')
         password = self._get_param('Password')
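Note: on a rename the backend re-keys its users dict, so subsequent lookups must use the new name. A sketch, assuming mock_iam:

    import boto3
    from moto import mock_iam

    @mock_iam
    def rename_user():
        iam = boto3.client('iam', region_name='us-east-1')
        iam.create_user(UserName='alice')
        iam.update_user(UserName='alice', NewPath='/staff/', NewUserName='alicia')
        return iam.get_user(UserName='alicia')['User']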
@@ -454,9 +543,14 @@ class IamResponse(BaseResponse):
         template = self.response_template(GENERIC_EMPTY_TEMPLATE)
         return template.render(name='UpdateAccessKey')
 
+    def get_access_key_last_used(self):
+        access_key_id = self._get_param('AccessKeyId')
+        last_used_response = iam_backend.get_access_key_last_used(access_key_id)
+        template = self.response_template(GET_ACCESS_KEY_LAST_USED_TEMPLATE)
+        return template.render(user_name=last_used_response["user_name"], last_used=last_used_response["last_used"])
+
     def list_access_keys(self):
         user_name = self._get_param('UserName')
 
         keys = iam_backend.get_all_access_keys(user_name)
         template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE)
         return template.render(user_name=user_name, keys=keys)
@@ -549,9 +643,137 @@ class IamResponse(BaseResponse):
             policies=account_details['managed_policies'],
             users=account_details['users'],
             groups=account_details['groups'],
-            roles=account_details['roles']
+            roles=account_details['roles'],
+            get_groups_for_user=iam_backend.get_groups_for_user
         )
 
+    def create_saml_provider(self):
+        saml_provider_name = self._get_param('Name')
+        saml_metadata_document = self._get_param('SAMLMetadataDocument')
+        saml_provider = iam_backend.create_saml_provider(saml_provider_name, saml_metadata_document)
+
+        template = self.response_template(CREATE_SAML_PROVIDER_TEMPLATE)
+        return template.render(saml_provider=saml_provider)
+
+    def update_saml_provider(self):
+        saml_provider_arn = self._get_param('SAMLProviderArn')
+        saml_metadata_document = self._get_param('SAMLMetadataDocument')
+        saml_provider = iam_backend.update_saml_provider(saml_provider_arn, saml_metadata_document)
+
+        template = self.response_template(UPDATE_SAML_PROVIDER_TEMPLATE)
+        return template.render(saml_provider=saml_provider)
+
+    def delete_saml_provider(self):
+        saml_provider_arn = self._get_param('SAMLProviderArn')
+        iam_backend.delete_saml_provider(saml_provider_arn)
+
+        template = self.response_template(DELETE_SAML_PROVIDER_TEMPLATE)
+        return template.render()
+
+    def list_saml_providers(self):
+        saml_providers = iam_backend.list_saml_providers()
+
+        template = self.response_template(LIST_SAML_PROVIDERS_TEMPLATE)
+        return template.render(saml_providers=saml_providers)
+
+    def get_saml_provider(self):
+        saml_provider_arn = self._get_param('SAMLProviderArn')
+        saml_provider = iam_backend.get_saml_provider(saml_provider_arn)
+
+        template = self.response_template(GET_SAML_PROVIDER_TEMPLATE)
+        return template.render(saml_provider=saml_provider)
+
+    def upload_signing_certificate(self):
+        user_name = self._get_param('UserName')
+        cert_body = self._get_param('CertificateBody')
+
+        cert = iam_backend.upload_signing_certificate(user_name, cert_body)
+        template = self.response_template(UPLOAD_SIGNING_CERTIFICATE_TEMPLATE)
+        return template.render(cert=cert)
+
+    def update_signing_certificate(self):
+        user_name = self._get_param('UserName')
+        cert_id = self._get_param('CertificateId')
+        status = self._get_param('Status')
+
+        iam_backend.update_signing_certificate(user_name, cert_id, status)
+        template = self.response_template(UPDATE_SIGNING_CERTIFICATE_TEMPLATE)
+        return template.render()
+
+    def delete_signing_certificate(self):
+        user_name = self._get_param('UserName')
+        cert_id = self._get_param('CertificateId')
+
+        iam_backend.delete_signing_certificate(user_name, cert_id)
+        template = self.response_template(DELETE_SIGNING_CERTIFICATE_TEMPLATE)
+        return template.render()
+
+    def list_signing_certificates(self):
+        user_name = self._get_param('UserName')
+
+        certs = iam_backend.list_signing_certificates(user_name)
+        template = self.response_template(LIST_SIGNING_CERTIFICATES_TEMPLATE)
+        return template.render(user_name=user_name, certificates=certs)
+
+    def list_role_tags(self):
+        role_name = self._get_param('RoleName')
+        marker = self._get_param('Marker')
+        max_items = self._get_param('MaxItems', 100)
+
+        tags, marker = iam_backend.list_role_tags(role_name, marker, max_items)
+
+        template = self.response_template(LIST_ROLE_TAG_TEMPLATE)
+        return template.render(tags=tags, marker=marker)
+
+    def tag_role(self):
+        role_name = self._get_param('RoleName')
+        tags = self._get_multi_param('Tags.member')
+
+        iam_backend.tag_role(role_name, tags)
+
+        template = self.response_template(TAG_ROLE_TEMPLATE)
+        return template.render()
+
+    def untag_role(self):
+        role_name = self._get_param('RoleName')
+        tag_keys = self._get_multi_param('TagKeys.member')
+
+        iam_backend.untag_role(role_name, tag_keys)
+
+        template = self.response_template(UNTAG_ROLE_TEMPLATE)
+        return template.render()
+
+
+LIST_ENTITIES_FOR_POLICY_TEMPLATE = """<ListEntitiesForPolicyResponse>
+  <ListEntitiesForPolicyResult>
+    <PolicyRoles>
+      {% for role in roles %}
+      <member>
+        <RoleName>{{ role }}</RoleName>
+      </member>
+      {% endfor %}
+    </PolicyRoles>
+    <PolicyGroups>
+      {% for group in groups %}
+      <member>
+        <GroupName>{{ group }}</GroupName>
+      </member>
+      {% endfor %}
+    </PolicyGroups>
+    <IsTruncated>false</IsTruncated>
+    <PolicyUsers>
+      {% for user in users %}
+      <member>
+        <UserName>{{ user }}</UserName>
+      </member>
+      {% endfor %}
+    </PolicyUsers>
+  </ListEntitiesForPolicyResult>
+  <ResponseMetadata>
+    <RequestId>eb358e22-9d1f-11e4-93eb-190ecEXAMPLE</RequestId>
+  </ResponseMetadata>
+</ListEntitiesForPolicyResponse>"""
+
 
 ATTACH_ROLE_POLICY_TEMPLATE = """<AttachRolePolicyResponse>
   <ResponseMetadata>
@@ -734,7 +956,7 @@ CREATE_INSTANCE_PROFILE_TEMPLATE = """<CreateInstanceProfileResponse xmlns="http
       <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
       <Path>{{ profile.path }}</Path>
       <Arn>{{ profile.arn }}</Arn>
-      <CreateDate>2012-05-09T16:11:10.222Z</CreateDate>
+      <CreateDate>{{ profile.create_date }}</CreateDate>
     </InstanceProfile>
   </CreateInstanceProfileResult>
   <ResponseMetadata>
@@ -753,7 +975,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
           <Arn>{{ role.arn }}</Arn>
           <RoleName>{{ role.name }}</RoleName>
           <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
-          <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+          <CreateDate>{{ role.create_date }}</CreateDate>
           <RoleId>{{ role.id }}</RoleId>
         </member>
       {% endfor %}
@@ -761,7 +983,7 @@ GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://ia
       <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
       <Path>{{ profile.path }}</Path>
       <Arn>{{ profile.arn }}</Arn>
-      <CreateDate>2012-05-09T16:11:10Z</CreateDate>
+      <CreateDate>{{ profile.create_date }}</CreateDate>
     </InstanceProfile>
   </GetInstanceProfileResult>
   <ResponseMetadata>
@@ -776,7 +998,7 @@ CREATE_ROLE_TEMPLATE = """<CreateRoleResponse xmlns="https://iam.amazonaws.com/d
       <Arn>{{ role.arn }}</Arn>
       <RoleName>{{ role.name }}</RoleName>
       <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
-      <CreateDate>2012-05-08T23:34:01.495Z</CreateDate>
+      <CreateDate>{{ role.create_date }}</CreateDate>
       <RoleId>{{ role.id }}</RoleId>
     </Role>
   </CreateRoleResult>
@@ -796,6 +1018,40 @@ GET_ROLE_POLICY_TEMPLATE = """<GetRolePolicyResponse xmlns="https://iam.amazonaw
 </ResponseMetadata>
 </GetRolePolicyResponse>"""
 
+UPDATE_ROLE_TEMPLATE = """<UpdateRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+  <UpdateRoleResult>
+  </UpdateRoleResult>
+  <ResponseMetadata>
+    <RequestId>df37e965-9967-11e1-a4c3-270EXAMPLE04</RequestId>
+  </ResponseMetadata>
+</UpdateRoleResponse>"""
+
+UPDATE_ROLE_DESCRIPTION_TEMPLATE = """<UpdateRoleDescriptionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+  <UpdateRoleDescriptionResult>
+    <Role>
+      <Path>{{ role.path }}</Path>
+      <Arn>{{ role.arn }}</Arn>
+      <RoleName>{{ role.name }}</RoleName>
+      <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
+      <CreateDate>{{ role.create_date.isoformat() }}</CreateDate>
+      <RoleId>{{ role.id }}</RoleId>
+      {% if role.tags %}
+      <Tags>
+        {% for tag in role.get_tags() %}
+        <member>
+          <Key>{{ tag['Key'] }}</Key>
+          <Value>{{ tag['Value'] }}</Value>
+        </member>
+        {% endfor %}
+      </Tags>
+      {% endif %}
+    </Role>
+  </UpdateRoleDescriptionResult>
+  <ResponseMetadata>
+    <RequestId>df37e965-9967-11e1-a4c3-270EXAMPLE04</RequestId>
+  </ResponseMetadata>
+</UpdateRoleDescriptionResponse>"""
+
 GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
   <GetRoleResult>
     <Role>
@@ -803,8 +1059,18 @@ GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/201
       <Arn>{{ role.arn }}</Arn>
       <RoleName>{{ role.name }}</RoleName>
       <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
-      <CreateDate>2012-05-08T23:34:01Z</CreateDate>
+      <CreateDate>{{ role.create_date }}</CreateDate>
       <RoleId>{{ role.id }}</RoleId>
+      {% if role.tags %}
+      <Tags>
+        {% for tag in role.get_tags() %}
+        <member>
+          <Key>{{ tag['Key'] }}</Key>
+          <Value>{{ tag['Value'] }}</Value>
+        </member>
+        {% endfor %}
+      </Tags>
+      {% endif %}
     </Role>
   </GetRoleResult>
   <ResponseMetadata>
@@ -834,7 +1100,7 @@ LIST_ROLES_TEMPLATE = """<ListRolesResponse xmlns="https://iam.amazonaws.com/doc
       <Arn>{{ role.arn }}</Arn>
       <RoleName>{{ role.name }}</RoleName>
       <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
-      <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+      <CreateDate>{{ role.create_date }}</CreateDate>
       <RoleId>{{ role.id }}</RoleId>
     </member>
   {% endfor %}
@@ -865,7 +1131,7 @@ CREATE_POLICY_VERSION_TEMPLATE = """<CreatePolicyVersionResponse xmlns="https://
       <Document>{{ policy_version.document }}</Document>
       <VersionId>{{ policy_version.version_id }}</VersionId>
       <IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
-      <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+      <CreateDate>{{ policy_version.create_datetime }}</CreateDate>
     </PolicyVersion>
   </CreatePolicyVersionResult>
   <ResponseMetadata>
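Note: because version ids now come from the monotonically increasing next_version_num, deleting a version no longer frees its id for reuse. A sketch of the behavior this enables, assuming mock_iam; the policy document is hypothetical:

    import boto3
    from moto import mock_iam

    @mock_iam
    def version_ids_not_recycled():
        iam = boto3.client('iam', region_name='us-east-1')
        doc = '{"Version": "2012-10-17", "Statement": []}'
        arn = iam.create_policy(PolicyName='demo', PolicyDocument=doc)['Policy']['Arn']
        v2 = iam.create_policy_version(PolicyArn=arn, PolicyDocument=doc)['PolicyVersion']['VersionId']
        iam.delete_policy_version(PolicyArn=arn, VersionId=v2)
        v3 = iam.create_policy_version(PolicyArn=arn, PolicyDocument=doc)['PolicyVersion']['VersionId']
        assert (v2, v3) == ('v2', 'v3')  # the old len()-based scheme would hand out 'v2' twice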
@@ -879,7 +1145,7 @@ GET_POLICY_VERSION_TEMPLATE = """<GetPolicyVersionResponse xmlns="https://iam.am
       <Document>{{ policy_version.document }}</Document>
       <VersionId>{{ policy_version.version_id }}</VersionId>
       <IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
-      <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+      <CreateDate>{{ policy_version.create_datetime }}</CreateDate>
     </PolicyVersion>
   </GetPolicyVersionResult>
   <ResponseMetadata>
@@ -896,7 +1162,7 @@ LIST_POLICY_VERSIONS_TEMPLATE = """<ListPolicyVersionsResponse xmlns="https://ia
       <Document>{{ policy_version.document }}</Document>
       <VersionId>{{ policy_version.version_id }}</VersionId>
       <IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
-      <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+      <CreateDate>{{ policy_version.create_datetime }}</CreateDate>
     </member>
   {% endfor %}
   </Versions>
@@ -912,7 +1178,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
   <InstanceProfiles>
     {% for instance in instance_profiles %}
     <member>
-      <Id>{{ instance.id }}</Id>
+      <InstanceProfileId>{{ instance.id }}</InstanceProfileId>
       <Roles>
         {% for role in instance.roles %}
         <member>
@@ -920,7 +1186,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
           <Arn>{{ role.arn }}</Arn>
           <RoleName>{{ role.name }}</RoleName>
           <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
-          <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+          <CreateDate>{{ role.create_date }}</CreateDate>
           <RoleId>{{ role.id }}</RoleId>
         </member>
         {% endfor %}
@@ -928,7 +1194,7 @@ LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https:
       <InstanceProfileName>{{ instance.name }}</InstanceProfileName>
      <Path>{{ instance.path }}</Path>
       <Arn>{{ instance.arn }}</Arn>
-      <CreateDate>2012-05-09T16:27:03Z</CreateDate>
+      <CreateDate>{{ instance.create_date }}</CreateDate>
     </member>
   {% endfor %}
   </InstanceProfiles>
@@ -1199,8 +1465,8 @@ LIST_USER_POLICIES_TEMPLATE = """<ListUserPoliciesResponse>
       <member>{{ policy }}</member>
     {% endfor %}
     </PolicyNames>
-  </ListUserPoliciesResult>
   <IsTruncated>false</IsTruncated>
+  </ListUserPoliciesResult>
   <ResponseMetadata>
     <RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
   </ResponseMetadata>
@@ -1240,11 +1506,23 @@ LIST_ACCESS_KEYS_TEMPLATE = """<ListAccessKeysResponse>
   </ResponseMetadata>
 </ListAccessKeysResponse>"""
 
+
+GET_ACCESS_KEY_LAST_USED_TEMPLATE = """
+<GetAccessKeyLastUsedResponse>
+<GetAccessKeyLastUsedResult>
+<UserName>{{ user_name }}</UserName>
+<AccessKeyLastUsed>
+<LastUsedDate>{{ last_used }}</LastUsedDate>
+</AccessKeyLastUsed>
+</GetAccessKeyLastUsedResult>
+</GetAccessKeyLastUsedResponse>
+"""
+
 CREDENTIAL_REPORT_GENERATING = """
 <GenerateCredentialReportResponse>
     <GenerateCredentialReportResult>
-        <state>STARTED</state>
-        <description>No report exists. Starting a new report generation task</description>
+        <State>STARTED</State>
+        <Description>No report exists. Starting a new report generation task</Description>
     </GenerateCredentialReportResult>
     <ResponseMetadata>
         <RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
@@ -1253,7 +1531,7 @@ CREDENTIAL_REPORT_GENERATING = """
 
 CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
     <GenerateCredentialReportResult>
-        <state>COMPLETE</state>
+        <State>COMPLETE</State>
     </GenerateCredentialReportResult>
     <ResponseMetadata>
         <RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
@@ -1262,7 +1540,7 @@ CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
 
 CREDENTIAL_REPORT = """<GetCredentialReportResponse>
     <GetCredentialReportResult>
-        <content>{{ report }}</content>
+        <Content>{{ report }}</Content>
         <GeneratedTime>2015-02-02T20:02:02Z</GeneratedTime>
         <ReportFormat>text/csv</ReportFormat>
     </GetCredentialReportResult>
@@ -1277,7 +1555,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
   <InstanceProfiles>
     {% for profile in instance_profiles %}
     <member>
-      <Id>{{ profile.id }}</Id>
+      <InstanceProfileId>{{ profile.id }}</InstanceProfileId>
       <Roles>
         {% for role in profile.roles %}
         <member>
@@ -1285,7 +1563,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
           <Arn>{{ role.arn }}</Arn>
           <RoleName>{{ role.name }}</RoleName>
           <AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument>
-          <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+          <CreateDate>{{ role.create_date }}</CreateDate>
           <RoleId>{{ role.id }}</RoleId>
         </member>
         {% endfor %}
@@ -1293,7 +1571,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
       <InstanceProfileName>{{ profile.name }}</InstanceProfileName>
       <Path>{{ profile.path }}</Path>
       <Arn>{{ profile.arn }}</Arn>
-      <CreateDate>2012-05-09T16:27:11Z</CreateDate>
+      <CreateDate>{{ profile.create_date }}</CreateDate>
     </member>
   {% endfor %}
   </InstanceProfiles>
@@ -1376,13 +1654,24 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
   <UserDetailList>
     {% for user in users %}
     <member>
-      <GroupList />
-      <AttachedManagedPolicies/>
+      <GroupList>
+        {% for group in get_groups_for_user(user.name) %}
+        <member>{{ group.name }}</member>
+        {% endfor %}
+      </GroupList>
+      <AttachedManagedPolicies>
+        {% for policy in user.managed_policies %}
+        <member>
+          <PolicyName>{{ user.managed_policies[policy].name }}</PolicyName>
+          <PolicyArn>{{ policy }}</PolicyArn>
+        </member>
+        {% endfor %}
+      </AttachedManagedPolicies>
       <UserId>{{ user.id }}</UserId>
       <Path>{{ user.path }}</Path>
       <UserName>{{ user.name }}</UserName>
       <Arn>{{ user.arn }}</Arn>
-      <CreateDate>2012-05-09T15:45:35Z</CreateDate>
+      <CreateDate>{{ user.created_iso_8601 }}</CreateDate>
     </member>
     {% endfor %}
   </UserDetailList>
@@ -1391,37 +1680,59 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
     <member>
       <GroupId>{{ group.id }}</GroupId>
       <AttachedManagedPolicies>
-      {% for policy in group.managed_policies %}
+        {% for policy_arn in group.managed_policies %}
         <member>
-          <PolicyName>{{ policy.name }}</PolicyName>
-          <PolicyArn>{{ policy.arn }}</PolicyArn>
+          <PolicyName>{{ group.managed_policies[policy_arn].name }}</PolicyName>
+          <PolicyArn>{{ policy_arn }}</PolicyArn>
         </member>
-      {% endfor %}
+        {% endfor %}
       </AttachedManagedPolicies>
       <GroupName>{{ group.name }}</GroupName>
       <Path>{{ group.path }}</Path>
       <Arn>{{ group.arn }}</Arn>
-      <CreateDate>2012-05-09T16:27:11Z</CreateDate>
-      <GroupPolicyList/>
+      <CreateDate>{{ group.create_date }}</CreateDate>
+      <GroupPolicyList>
+        {% for policy in group.policies %}
+        <member>
+          <PolicyName>{{ policy }}</PolicyName>
+          <PolicyDocument>{{ group.get_policy(policy) }}</PolicyDocument>
+        </member>
+        {% endfor %}
+      </GroupPolicyList>
     </member>
     {% endfor %}
   </GroupDetailList>
   <RoleDetailList>
     {% for role in roles %}
     <member>
-      <RolePolicyList/>
-      <AttachedManagedPolicies>
-      {% for policy in role.managed_policies %}
-        <member>
-          <PolicyName>{{ policy.name }}</PolicyName>
-          <PolicyArn>{{ policy.arn }}</PolicyArn>
-        </member>
-      {% endfor %}
-      </AttachedManagedPolicies>
+      <RolePolicyList>
+        {% for inline_policy in role.policies %}
+        <member>
+          <PolicyName>{{ inline_policy }}</PolicyName>
+          <PolicyDocument>{{ role.policies[inline_policy] }}</PolicyDocument>
+        </member>
+        {% endfor %}
+      </RolePolicyList>
+      <AttachedManagedPolicies>
+        {% for policy_arn in role.managed_policies %}
+        <member>
+          <PolicyName>{{ role.managed_policies[policy_arn].name }}</PolicyName>
+          <PolicyArn>{{ policy_arn }}</PolicyArn>
+        </member>
+        {% endfor %}
+      </AttachedManagedPolicies>
+      <Tags>
+        {% for tag in role.get_tags() %}
+        <member>
+          <Key>{{ tag['Key'] }}</Key>
+          <Value>{{ tag['Value'] }}</Value>
+        </member>
+        {% endfor %}
+      </Tags>
       <InstanceProfileList>
         {% for profile in instance_profiles %}
         <member>
-          <Id>{{ profile.id }}</Id>
+          <InstanceProfileId>{{ profile.id }}</InstanceProfileId>
           <Roles>
             {% for role in profile.roles %}
             <member>
|
|||||||
<Arn>{{ role.arn }}</Arn>
|
<Arn>{{ role.arn }}</Arn>
|
||||||
<RoleName>{{ role.name }}</RoleName>
|
<RoleName>{{ role.name }}</RoleName>
|
||||||
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
||||||
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
|
<CreateDate>{{ role.create_date }}</CreateDate>
|
||||||
<RoleId>{{ role.id }}</RoleId>
|
<RoleId>{{ role.id }}</RoleId>
|
||||||
</member>
|
</member>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
@ -1437,7 +1748,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
|
|||||||
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
|
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
|
||||||
<Path>{{ profile.path }}</Path>
|
<Path>{{ profile.path }}</Path>
|
||||||
<Arn>{{ profile.arn }}</Arn>
|
<Arn>{{ profile.arn }}</Arn>
|
||||||
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
<CreateDate>{{ profile.create_date }}</CreateDate>
|
||||||
</member>
|
</member>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</InstanceProfileList>
|
</InstanceProfileList>
|
||||||
@ -1445,7 +1756,7 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
|
|||||||
<Arn>{{ role.arn }}</Arn>
|
<Arn>{{ role.arn }}</Arn>
|
||||||
<RoleName>{{ role.name }}</RoleName>
|
<RoleName>{{ role.name }}</RoleName>
|
||||||
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
||||||
<CreateDate>2014-07-30T17:09:20Z</CreateDate>
|
<CreateDate>{{ role.create_date }}</CreateDate>
|
||||||
<RoleId>{{ role.id }}</RoleId>
|
<RoleId>{{ role.id }}</RoleId>
|
||||||
</member>
|
</member>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
@@ -1458,25 +1769,20 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
       <PolicyId>{{ policy.id }}</PolicyId>
       <Path>{{ policy.path }}</Path>
       <PolicyVersionList>
+        {% for policy_version in policy.versions %}
         <member>
-          <Document>
-            {"Version":"2012-10-17","Statement":{"Effect":"Allow",
-            "Action":["iam:CreatePolicy","iam:CreatePolicyVersion",
-            "iam:DeletePolicy","iam:DeletePolicyVersion","iam:GetPolicy",
-            "iam:GetPolicyVersion","iam:ListPolicies",
-            "iam:ListPolicyVersions","iam:SetDefaultPolicyVersion"],
-            "Resource":"*"}}
-          </Document>
-          <IsDefaultVersion>true</IsDefaultVersion>
-          <VersionId>v1</VersionId>
-          <CreateDate>2012-05-09T16:27:11Z</CreateDate>
+          <Document>{{ policy_version.document }}</Document>
+          <IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
+          <VersionId>{{ policy_version.version_id }}</VersionId>
+          <CreateDate>{{ policy_version.create_datetime }}</CreateDate>
         </member>
+        {% endfor %}
       </PolicyVersionList>
       <Arn>{{ policy.arn }}</Arn>
       <AttachmentCount>1</AttachmentCount>
-      <CreateDate>2012-05-09T16:27:11Z</CreateDate>
+      <CreateDate>{{ policy.create_datetime }}</CreateDate>
       <IsAttachable>true</IsAttachable>
-      <UpdateDate>2012-05-09T16:27:11Z</UpdateDate>
+      <UpdateDate>{{ policy.update_datetime }}</UpdateDate>
     </member>
   {% endfor %}
   </Policies>
@ -1485,3 +1791,139 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
|
|||||||
<RequestId>92e79ae7-7399-11e4-8c85-4b53eEXAMPLE</RequestId>
|
<RequestId>92e79ae7-7399-11e4-8c85-4b53eEXAMPLE</RequestId>
|
||||||
</ResponseMetadata>
|
</ResponseMetadata>
|
||||||
</GetAccountAuthorizationDetailsResponse>"""
|
</GetAccountAuthorizationDetailsResponse>"""
|
||||||
|
|
||||||
|
CREATE_SAML_PROVIDER_TEMPLATE = """<CreateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<CreateSAMLProviderResult>
|
||||||
|
<SAMLProviderArn>{{ saml_provider.arn }}</SAMLProviderArn>
|
||||||
|
</CreateSAMLProviderResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</CreateSAMLProviderResponse>"""
|
||||||
|
|
||||||
|
LIST_SAML_PROVIDERS_TEMPLATE = """<ListSAMLProvidersResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ListSAMLProvidersResult>
|
||||||
|
<SAMLProviderList>
|
||||||
|
{% for saml_provider in saml_providers %}
|
||||||
|
<member>
|
||||||
|
<Arn>{{ saml_provider.arn }}</Arn>
|
||||||
|
<ValidUntil>2032-05-09T16:27:11Z</ValidUntil>
|
||||||
|
<CreateDate>2012-05-09T16:27:03Z</CreateDate>
|
||||||
|
</member>
|
||||||
|
{% endfor %}
|
||||||
|
</SAMLProviderList>
|
||||||
|
</ListSAMLProvidersResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</ListSAMLProvidersResponse>"""
|
||||||
|
|
||||||
|
GET_SAML_PROVIDER_TEMPLATE = """<GetSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<GetSAMLProviderResult>
|
||||||
|
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
||||||
|
<ValidUntil>2015-12-31T21:59:59Z</ValidUntil>
|
||||||
|
<SAMLMetadataDocument>{{ saml_provider.saml_metadata_document }}</SAMLMetadataDocument>
|
||||||
|
</GetSAMLProviderResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</GetSAMLProviderResponse>"""
|
||||||
|
|
||||||
|
DELETE_SAML_PROVIDER_TEMPLATE = """<DeleteSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>c749ee7f-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</DeleteSAMLProviderResponse>"""
|
||||||
|
|
||||||
|
UPDATE_SAML_PROVIDER_TEMPLATE = """<UpdateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<UpdateSAMLProviderResult>
|
||||||
|
<SAMLProviderArn>{{ saml_provider.arn }}</SAMLProviderArn>
|
||||||
|
</UpdateSAMLProviderResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</UpdateSAMLProviderResponse>"""
|
||||||
|
|
||||||
|
UPLOAD_SIGNING_CERTIFICATE_TEMPLATE = """<UploadSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<UploadSigningCertificateResult>
|
||||||
|
<Certificate>
|
||||||
|
<UserName>{{ cert.user_name }}</UserName>
|
||||||
|
<CertificateId>{{ cert.id }}</CertificateId>
|
||||||
|
<CertificateBody>{{ cert.body }}</CertificateBody>
|
||||||
|
<Status>{{ cert.status }}</Status>
|
||||||
|
</Certificate>
|
||||||
|
</UploadSigningCertificateResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</UploadSigningCertificateResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
UPDATE_SIGNING_CERTIFICATE_TEMPLATE = """<UpdateSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</UpdateSigningCertificateResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
DELETE_SIGNING_CERTIFICATE_TEMPLATE = """<DeleteSigningCertificateResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</DeleteSigningCertificateResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
LIST_SIGNING_CERTIFICATES_TEMPLATE = """<ListSigningCertificatesResponse>
|
||||||
|
<ListSigningCertificatesResult>
|
||||||
|
<UserName>{{ user_name }}</UserName>
|
||||||
|
<Certificates>
|
||||||
|
{% for cert in certificates %}
|
||||||
|
<member>
|
||||||
|
<UserName>{{ user_name }}</UserName>
|
||||||
|
<CertificateId>{{ cert.id }}</CertificateId>
|
||||||
|
<CertificateBody>{{ cert.body }}</CertificateBody>
|
||||||
|
<Status>{{ cert.status }}</Status>
|
||||||
|
</member>
|
||||||
|
{% endfor %}
|
||||||
|
</Certificates>
|
||||||
|
<IsTruncated>false</IsTruncated>
|
||||||
|
</ListSigningCertificatesResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</ListSigningCertificatesResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
TAG_ROLE_TEMPLATE = """<TagRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</TagRoleResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
LIST_ROLE_TAG_TEMPLATE = """<ListRoleTagsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ListRoleTagsResult>
|
||||||
|
<IsTruncated>{{ 'true' if marker else 'false' }}</IsTruncated>
|
||||||
|
{% if marker %}
|
||||||
|
<Marker>{{ marker }}</Marker>
|
||||||
|
{% endif %}
|
||||||
|
<Tags>
|
||||||
|
{% for tag in tags %}
|
||||||
|
<member>
|
||||||
|
<Key>{{ tag['Key'] }}</Key>
|
||||||
|
<Value>{{ tag['Value'] }}</Value>
|
||||||
|
</member>
|
||||||
|
{% endfor %}
|
||||||
|
</Tags>
|
||||||
|
</ListRoleTagsResult>
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</ListRoleTagsResponse>"""
|
||||||
|
|
||||||
|
|
||||||
|
UNTAG_ROLE_TEMPLATE = """<UntagRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
<ResponseMetadata>
|
||||||
|
<RequestId>EXAMPLE8-90ab-cdef-fedc-ba987EXAMPLE</RequestId>
|
||||||
|
</ResponseMetadata>
|
||||||
|
</UntagRoleResponse>"""
|
||||||
|
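For orientation, a minimal sketch (not part of the diff) of the role-tagging round trip these templates back, using boto3 against moto's IAM mock; the role name and tag values are illustrative:

    import boto3
    from moto import mock_iam

    @mock_iam
    def test_role_tags():
        client = boto3.client('iam', region_name='us-east-1')
        client.create_role(RoleName='my-role', AssumeRolePolicyDocument='{}')
        client.tag_role(RoleName='my-role', Tags=[{'Key': 'env', 'Value': 'test'}])
        tags = client.list_role_tags(RoleName='my-role')['Tags']
        assert tags == [{'Key': 'env', 'Value': 'test'}]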
@ -12,8 +12,7 @@ def random_alphanumeric(length):
     )


-def random_resource_id():
-    size = 20
+def random_resource_id(size=20):
     chars = list(range(10)) + list(string.ascii_lowercase)

     return ''.join(six.text_type(random.choice(chars)) for x in range(size))
@ -31,3 +31,20 @@ class VersionConflictException(IoTClientError):
             'VersionConflictException',
             'The version for thing %s does not match the expected version.' % name
         )
+
+
+class CertificateStateException(IoTClientError):
+    def __init__(self, msg, cert_id):
+        self.code = 406
+        super(CertificateStateException, self).__init__(
+            'CertificateStateException',
+            '%s Id: %s' % (msg, cert_id)
+        )
+
+
+class DeleteConflictException(IoTClientError):
+    def __init__(self, msg):
+        self.code = 409
+        super(DeleteConflictException, self).__init__(
+            'DeleteConflictException', msg
+        )
@ -13,6 +13,8 @@ import boto3

 from moto.core import BaseBackend, BaseModel
 from .exceptions import (
+    CertificateStateException,
+    DeleteConflictException,
     ResourceNotFoundException,
     InvalidRequestException,
     VersionConflictException
@ -378,7 +380,25 @@ class IoTBackend(BaseBackend):
         return certificate, key_pair

     def delete_certificate(self, certificate_id):
-        self.describe_certificate(certificate_id)
+        cert = self.describe_certificate(certificate_id)
+        if cert.status == 'ACTIVE':
+            raise CertificateStateException(
+                'Certificate must be deactivated (not ACTIVE) before deletion.', certificate_id)
+
+        certs = [k[0] for k, v in self.principal_things.items()
+                 if self._get_principal(k[0]).certificate_id == certificate_id]
+        if len(certs) > 0:
+            raise DeleteConflictException(
+                'Things must be detached before deletion (arn: %s)' % certs[0]
+            )
+
+        certs = [k[0] for k, v in self.principal_policies.items()
+                 if self._get_principal(k[0]).certificate_id == certificate_id]
+        if len(certs) > 0:
+            raise DeleteConflictException(
+                'Certificate policies must be detached before deletion (arn: %s)' % certs[0]
+            )
+
         del self.certificates[certificate_id]

     def describe_certificate(self, certificate_id):
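The new guards can be exercised end to end; a hedged sketch with boto3's IoT client under moto (the certificate id comes from the mock itself):

    import boto3
    from moto import mock_iot

    @mock_iot
    def test_certificate_must_be_inactive_before_delete():
        client = boto3.client('iot', region_name='us-east-1')
        cert_id = client.create_keys_and_certificate(setAsActive=True)['certificateId']
        # Deleting while ACTIVE now raises CertificateStateException (HTTP 406),
        # so deactivate first, then delete.
        client.update_certificate(certificateId=cert_id, newStatus='INACTIVE')
        client.delete_certificate(certificateId=cert_id)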
@ -411,6 +431,14 @@ class IoTBackend(BaseBackend):
         return policies[0]

     def delete_policy(self, policy_name):
+        policies = [k[1] for k, v in self.principal_policies.items() if k[1] == policy_name]
+        if len(policies) > 0:
+            raise DeleteConflictException(
+                'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)'
+                % policy_name
+            )
+
         policy = self.get_policy(policy_name)
         del self.policies[policy.name]
@ -429,6 +457,14 @@ class IoTBackend(BaseBackend):
             pass
         raise ResourceNotFoundException()

+    def attach_policy(self, policy_name, target):
+        principal = self._get_principal(target)
+        policy = self.get_policy(policy_name)
+        k = (target, policy_name)
+        if k in self.principal_policies:
+            return
+        self.principal_policies[k] = (principal, policy)
+
     def attach_principal_policy(self, policy_name, principal_arn):
         principal = self._get_principal(principal_arn)
         policy = self.get_policy(policy_name)
@ -437,6 +473,15 @@ class IoTBackend(BaseBackend):
             return
         self.principal_policies[k] = (principal, policy)

+    def detach_policy(self, policy_name, target):
+        # this may raises ResourceNotFoundException
+        self._get_principal(target)
+        self.get_policy(policy_name)
+        k = (target, policy_name)
+        if k not in self.principal_policies:
+            raise ResourceNotFoundException()
+        del self.principal_policies[k]
+
     def detach_principal_policy(self, policy_name, principal_arn):
         # this may raises ResourceNotFoundException
         self._get_principal(principal_arn)
@ -224,6 +224,15 @@ class IoTResponse(BaseResponse):
         )
         return json.dumps(dict())

+    def attach_policy(self):
+        policy_name = self._get_param("policyName")
+        target = self._get_param('target')
+        self.iot_backend.attach_policy(
+            policy_name=policy_name,
+            target=target,
+        )
+        return json.dumps(dict())
+
     def attach_principal_policy(self):
         policy_name = self._get_param("policyName")
         principal = self.headers.get('x-amzn-iot-principal')
@ -233,6 +242,15 @@ class IoTResponse(BaseResponse):
         )
         return json.dumps(dict())

+    def detach_policy(self):
+        policy_name = self._get_param("policyName")
+        target = self._get_param('target')
+        self.iot_backend.detach_policy(
+            policy_name=policy_name,
+            target=target,
+        )
+        return json.dumps(dict())
+
     def detach_principal_policy(self):
         policy_name = self._get_param("policyName")
         principal = self.headers.get('x-amzn-iot-principal')
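Unlike the older attach_principal_policy, these new handlers mirror the current AWS IoT API, which takes a target ARN as a request parameter rather than a principal header. A rough usage sketch (policy document abbreviated):

    import json
    import boto3
    from moto import mock_iot

    @mock_iot
    def test_attach_and_detach_policy():
        client = boto3.client('iot', region_name='us-east-1')
        cert_arn = client.create_keys_and_certificate(setAsActive=True)['certificateArn']
        client.create_policy(policyName='my-policy',
                             policyDocument=json.dumps({'Version': '2012-10-17', 'Statement': []}))
        client.attach_policy(policyName='my-policy', target=cert_arn)
        client.detach_policy(policyName='my-policy', target=cert_arn)
        # detaching again would raise ResourceNotFoundException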
36 moto/kms/exceptions.py (new file)
@ -0,0 +1,36 @@
+from __future__ import unicode_literals
+from moto.core.exceptions import JsonRESTError
+
+
+class NotFoundException(JsonRESTError):
+    code = 400
+
+    def __init__(self, message):
+        super(NotFoundException, self).__init__(
+            "NotFoundException", message)
+
+
+class ValidationException(JsonRESTError):
+    code = 400
+
+    def __init__(self, message):
+        super(ValidationException, self).__init__(
+            "ValidationException", message)
+
+
+class AlreadyExistsException(JsonRESTError):
+    code = 400
+
+    def __init__(self, message):
+        super(AlreadyExistsException, self).__init__(
+            "AlreadyExistsException", message)
+
+
+class NotAuthorizedException(JsonRESTError):
+    code = 400
+
+    def __init__(self):
+        super(NotAuthorizedException, self).__init__(
+            "NotAuthorizedException", None)
+
+        self.description = '{"__type":"NotAuthorizedException"}'
@ -1,5 +1,6 @@
 from __future__ import unicode_literals

+import os
 import boto.kms
 from moto.core import BaseBackend, BaseModel
 from moto.core.utils import iso_8601_datetime_without_milliseconds
@ -21,6 +22,7 @@ class Key(BaseModel):
         self.account_id = "0123456789012"
         self.key_rotation_status = False
         self.deletion_date = None
+        self.tags = {}

     @property
     def physical_resource_id(self):
@ -35,7 +37,7 @@ class Key(BaseModel):
             "KeyMetadata": {
                 "AWSAccountId": self.account_id,
                 "Arn": self.arn,
-                "CreationDate": "2015-01-01 00:00:00",
+                "CreationDate": datetime.strftime(datetime.utcnow(), "%s"),
                 "Description": self.description,
                 "Enabled": self.enabled,
                 "KeyId": self.id,
@ -63,7 +65,6 @@ class Key(BaseModel):
             )
             key.key_rotation_status = properties['EnableKeyRotation']
             key.enabled = properties['Enabled']
-
         return key

     def get_cfn_attribute(self, attribute_name):
@ -84,6 +85,18 @@ class KmsBackend(BaseBackend):
         self.keys[key.id] = key
         return key

+    def update_key_description(self, key_id, description):
+        key = self.keys[self.get_key_id(key_id)]
+        key.description = description
+
+    def tag_resource(self, key_id, tags):
+        key = self.keys[self.get_key_id(key_id)]
+        key.tags = tags
+
+    def list_resource_tags(self, key_id):
+        key = self.keys[self.get_key_id(key_id)]
+        return key.tags
+
     def delete_key(self, key_id):
         if key_id in self.keys:
             if key_id in self.key_to_aliases:
@ -147,28 +160,39 @@ class KmsBackend(BaseBackend):
         return self.keys[self.get_key_id(key_id)].policy

     def disable_key(self, key_id):
-        if key_id in self.keys:
-            self.keys[key_id].enabled = False
-            self.keys[key_id].key_state = 'Disabled'
+        self.keys[key_id].enabled = False
+        self.keys[key_id].key_state = 'Disabled'

     def enable_key(self, key_id):
-        if key_id in self.keys:
-            self.keys[key_id].enabled = True
-            self.keys[key_id].key_state = 'Enabled'
+        self.keys[key_id].enabled = True
+        self.keys[key_id].key_state = 'Enabled'

     def cancel_key_deletion(self, key_id):
-        if key_id in self.keys:
-            self.keys[key_id].key_state = 'Disabled'
-            self.keys[key_id].deletion_date = None
+        self.keys[key_id].key_state = 'Disabled'
+        self.keys[key_id].deletion_date = None

     def schedule_key_deletion(self, key_id, pending_window_in_days):
-        if key_id in self.keys:
-            if 7 <= pending_window_in_days <= 30:
-                self.keys[key_id].enabled = False
-                self.keys[key_id].key_state = 'PendingDeletion'
-                self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days)
-                return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date)
+        if 7 <= pending_window_in_days <= 30:
+            self.keys[key_id].enabled = False
+            self.keys[key_id].key_state = 'PendingDeletion'
+            self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days)
+            return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date)
+
+    def generate_data_key(self, key_id, encryption_context, number_of_bytes, key_spec, grant_tokens):
+        key = self.keys[self.get_key_id(key_id)]
+
+        if key_spec:
+            if key_spec == 'AES_128':
+                bytes = 16
+            else:
+                bytes = 32
+        else:
+            bytes = number_of_bytes
+
+        plaintext = os.urandom(bytes)
+
+        return plaintext, key.arn


 kms_backends = {}
 for region in boto.kms.regions():
@ -5,11 +5,9 @@ import json
 import re
 import six

-from boto.exception import JSONResponseError
-from boto.kms.exceptions import AlreadyExistsException, NotFoundException
 from moto.core.responses import BaseResponse
 from .models import kms_backends
+from .exceptions import NotFoundException, ValidationException, AlreadyExistsException, NotAuthorizedException

 reserved_aliases = [
     'alias/aws/ebs',
@ -38,6 +36,28 @@ class KmsResponse(BaseResponse):
             policy, key_usage, description, self.region)
         return json.dumps(key.to_dict())

+    def update_key_description(self):
+        key_id = self.parameters.get('KeyId')
+        description = self.parameters.get('Description')
+
+        self.kms_backend.update_key_description(key_id, description)
+        return json.dumps(None)
+
+    def tag_resource(self):
+        key_id = self.parameters.get('KeyId')
+        tags = self.parameters.get('Tags')
+        self.kms_backend.tag_resource(key_id, tags)
+        return json.dumps({})
+
+    def list_resource_tags(self):
+        key_id = self.parameters.get('KeyId')
+        tags = self.kms_backend.list_resource_tags(key_id)
+        return json.dumps({
+            "Tags": tags,
+            "NextMarker": None,
+            "Truncated": False,
+        })
+
     def describe_key(self):
         key_id = self.parameters.get('KeyId')
         try:
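A short sketch (illustrative, not from the commit) of the new tagging endpoints from the client side; the backend stores the tag list verbatim, so the same shape comes back:

    import boto3
    from moto import mock_kms

    @mock_kms
    def test_key_tagging():
        client = boto3.client('kms', region_name='us-east-1')
        key_id = client.create_key()['KeyMetadata']['KeyId']
        client.tag_resource(KeyId=key_id,
                            Tags=[{'TagKey': 'env', 'TagValue': 'test'}])
        tags = client.list_resource_tags(KeyId=key_id)['Tags']
        assert tags == [{'TagKey': 'env', 'TagValue': 'test'}]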
@ -66,36 +86,28 @@ class KmsResponse(BaseResponse):
     def create_alias(self):
         alias_name = self.parameters['AliasName']
         target_key_id = self.parameters['TargetKeyId']
-        region = self.region

         if not alias_name.startswith('alias/'):
-            raise JSONResponseError(400, 'Bad Request',
-                                    body={'message': 'Invalid identifier', '__type': 'ValidationException'})
+            raise ValidationException('Invalid identifier')

         if alias_name in reserved_aliases:
-            raise JSONResponseError(400, 'Bad Request', body={
-                '__type': 'NotAuthorizedException'})
+            raise NotAuthorizedException()

         if ':' in alias_name:
-            raise JSONResponseError(400, 'Bad Request', body={
-                'message': '{alias_name} contains invalid characters for an alias'.format(**locals()),
-                '__type': 'ValidationException'})
+            raise ValidationException('{alias_name} contains invalid characters for an alias'.format(alias_name=alias_name))

         if not re.match(r'^[a-zA-Z0-9:/_-]+$', alias_name):
-            raise JSONResponseError(400, 'Bad Request', body={
-                'message': "1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$"
-                .format(**locals()),
-                '__type': 'ValidationException'})
+            raise ValidationException("1 validation error detected: Value '{alias_name}' at 'aliasName' "
+                                      "failed to satisfy constraint: Member must satisfy regular "
+                                      "expression pattern: ^[a-zA-Z0-9:/_-]+$"
+                                      .format(alias_name=alias_name))

         if self.kms_backend.alias_exists(target_key_id):
-            raise JSONResponseError(400, 'Bad Request', body={
-                'message': 'Aliases must refer to keys. Not aliases',
-                '__type': 'ValidationException'})
+            raise ValidationException('Aliases must refer to keys. Not aliases')

         if self.kms_backend.alias_exists(alias_name):
-            raise AlreadyExistsException(400, 'Bad Request', body={
-                'message': 'An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} already exists'
-                .format(**locals()), '__type': 'AlreadyExistsException'})
+            raise AlreadyExistsException('An alias with the name arn:aws:kms:{region}:012345678912:{alias_name} '
+                                         'already exists'.format(region=self.region, alias_name=alias_name))

         self.kms_backend.add_alias(target_key_id, alias_name)
|
|||||||
|
|
||||||
def delete_alias(self):
|
def delete_alias(self):
|
||||||
alias_name = self.parameters['AliasName']
|
alias_name = self.parameters['AliasName']
|
||||||
region = self.region
|
|
||||||
|
|
||||||
if not alias_name.startswith('alias/'):
|
if not alias_name.startswith('alias/'):
|
||||||
raise JSONResponseError(400, 'Bad Request',
|
raise ValidationException('Invalid identifier')
|
||||||
body={'message': 'Invalid identifier', '__type': 'ValidationException'})
|
|
||||||
|
|
||||||
if not self.kms_backend.alias_exists(alias_name):
|
if not self.kms_backend.alias_exists(alias_name):
|
||||||
raise NotFoundException(400, 'Bad Request', body={
|
raise NotFoundException('Alias arn:aws:kms:{region}:012345678912:'
|
||||||
'message': 'Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format(**locals()),
|
'{alias_name} is not found.'.format(region=self.region, alias_name=alias_name))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
|
|
||||||
self.kms_backend.delete_alias(alias_name)
|
self.kms_backend.delete_alias(alias_name)
|
||||||
|
|
||||||
@ -150,9 +159,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
self.kms_backend.enable_key_rotation(key_id)
|
self.kms_backend.enable_key_rotation(key_id)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
|
|
||||||
return json.dumps(None)
|
return json.dumps(None)
|
||||||
|
|
||||||
@ -162,9 +170,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
self.kms_backend.disable_key_rotation(key_id)
|
self.kms_backend.disable_key_rotation(key_id)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
return json.dumps(None)
|
return json.dumps(None)
|
||||||
|
|
||||||
def get_key_rotation_status(self):
|
def get_key_rotation_status(self):
|
||||||
@ -173,9 +180,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
rotation_enabled = self.kms_backend.get_key_rotation_status(key_id)
|
rotation_enabled = self.kms_backend.get_key_rotation_status(key_id)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
return json.dumps({'KeyRotationEnabled': rotation_enabled})
|
return json.dumps({'KeyRotationEnabled': rotation_enabled})
|
||||||
|
|
||||||
def put_key_policy(self):
|
def put_key_policy(self):
|
||||||
@ -188,9 +194,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
self.kms_backend.put_key_policy(key_id, policy)
|
self.kms_backend.put_key_policy(key_id, policy)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
|
|
||||||
return json.dumps(None)
|
return json.dumps(None)
|
||||||
|
|
||||||
@ -203,9 +208,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
return json.dumps({'Policy': self.kms_backend.get_key_policy(key_id)})
|
return json.dumps({'Policy': self.kms_backend.get_key_policy(key_id)})
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
|
|
||||||
def list_key_policies(self):
|
def list_key_policies(self):
|
||||||
key_id = self.parameters.get('KeyId')
|
key_id = self.parameters.get('KeyId')
|
||||||
@ -213,9 +217,8 @@ class KmsResponse(BaseResponse):
|
|||||||
try:
|
try:
|
||||||
self.kms_backend.describe_key(key_id)
|
self.kms_backend.describe_key(key_id)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise JSONResponseError(404, 'Not Found', body={
|
raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
|
||||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
"{key_id}' does not exist".format(region=self.region, key_id=key_id))
|
||||||
'__type': 'NotFoundException'})
|
|
||||||
|
|
||||||
return json.dumps({'Truncated': False, 'PolicyNames': ['default']})
|
return json.dumps({'Truncated': False, 'PolicyNames': ['default']})
|
||||||
|
|
||||||
@ -227,11 +230,17 @@ class KmsResponse(BaseResponse):
         value = self.parameters.get("Plaintext")
         if isinstance(value, six.text_type):
             value = value.encode('utf-8')
-        return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8")})
+        return json.dumps({"CiphertextBlob": base64.b64encode(value).decode("utf-8"), 'KeyId': 'key_id'})

     def decrypt(self):
+        # TODO refuse decode if EncryptionContext is not the same as when it was encrypted / generated
+
         value = self.parameters.get("CiphertextBlob")
-        return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")})
+        try:
+            return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")})
+        except UnicodeDecodeError:
+            # Generate data key will produce random bytes which when decrypted is still returned as base64
+            return json.dumps({"Plaintext": value})

     def disable_key(self):
         key_id = self.parameters.get('KeyId')
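Worth noting: moto's encrypt/decrypt here is a base64 stand-in rather than real cryptography, so a round trip looks roughly like the sketch below (do not rely on the ciphertext format):

    import boto3
    from moto import mock_kms

    @mock_kms
    def test_encrypt_decrypt_roundtrip():
        client = boto3.client('kms', region_name='us-east-1')
        key_id = client.create_key()['KeyMetadata']['KeyId']
        blob = client.encrypt(KeyId=key_id, Plaintext=b'my secret')['CiphertextBlob']
        assert client.decrypt(CiphertextBlob=blob)['Plaintext'] == b'my secret'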
@ -239,9 +248,8 @@ class KmsResponse(BaseResponse):
         try:
             self.kms_backend.disable_key(key_id)
         except KeyError:
-            raise JSONResponseError(404, 'Not Found', body={
-                'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
-                '__type': 'NotFoundException'})
+            raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
+                                    "{key_id}' does not exist".format(region=self.region, key_id=key_id))
         return json.dumps(None)

     def enable_key(self):

@ -250,9 +258,8 @@ class KmsResponse(BaseResponse):
         try:
             self.kms_backend.enable_key(key_id)
         except KeyError:
-            raise JSONResponseError(404, 'Not Found', body={
-                'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
-                '__type': 'NotFoundException'})
+            raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
+                                    "{key_id}' does not exist".format(region=self.region, key_id=key_id))
         return json.dumps(None)

     def cancel_key_deletion(self):

@ -261,9 +268,8 @@ class KmsResponse(BaseResponse):
         try:
             self.kms_backend.cancel_key_deletion(key_id)
         except KeyError:
-            raise JSONResponseError(404, 'Not Found', body={
-                'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
-                '__type': 'NotFoundException'})
+            raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
+                                    "{key_id}' does not exist".format(region=self.region, key_id=key_id))
         return json.dumps({'KeyId': key_id})

     def schedule_key_deletion(self):
@ -279,19 +285,62 @@ class KmsResponse(BaseResponse):
                 'DeletionDate': self.kms_backend.schedule_key_deletion(key_id, pending_window_in_days)
             })
         except KeyError:
-            raise JSONResponseError(404, 'Not Found', body={
-                'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
-                '__type': 'NotFoundException'})
+            raise NotFoundException("Key 'arn:aws:kms:{region}:012345678912:key/"
+                                    "{key_id}' does not exist".format(region=self.region, key_id=key_id))
+
+    def generate_data_key(self):
+        key_id = self.parameters.get('KeyId')
+        encryption_context = self.parameters.get('EncryptionContext')
+        number_of_bytes = self.parameters.get('NumberOfBytes')
+        key_spec = self.parameters.get('KeySpec')
+        grant_tokens = self.parameters.get('GrantTokens')
+
+        # Param validation
+        if key_id.startswith('alias'):
+            if self.kms_backend.get_key_id_from_alias(key_id) is None:
+                raise NotFoundException('Alias arn:aws:kms:{region}:012345678912:{alias_name} is not found.'.format(
+                    region=self.region, alias_name=key_id))
+        else:
+            if self.kms_backend.get_key_id(key_id) not in self.kms_backend.keys:
+                raise NotFoundException('Invalid keyId')
+
+        if number_of_bytes and (number_of_bytes > 1024 or number_of_bytes < 0):
+            raise ValidationException("1 validation error detected: Value '2048' at 'numberOfBytes' failed "
+                                      "to satisfy constraint: Member must have value less than or "
+                                      "equal to 1024")
+
+        if key_spec and key_spec not in ('AES_256', 'AES_128'):
+            raise ValidationException("1 validation error detected: Value 'AES_257' at 'keySpec' failed "
+                                      "to satisfy constraint: Member must satisfy enum value set: "
+                                      "[AES_256, AES_128]")
+        if not key_spec and not number_of_bytes:
+            raise ValidationException("Please specify either number of bytes or key spec.")
+        if key_spec and number_of_bytes:
+            raise ValidationException("Please specify either number of bytes or key spec.")
+
+        plaintext, key_arn = self.kms_backend.generate_data_key(key_id, encryption_context,
+                                                                number_of_bytes, key_spec, grant_tokens)
+
+        plaintext = base64.b64encode(plaintext).decode()
+
+        return json.dumps({
+            'CiphertextBlob': plaintext,
+            'Plaintext': plaintext,
+            'KeyId': key_arn  # not alias
+        })
+
+    def generate_data_key_without_plaintext(self):
+        result = json.loads(self.generate_data_key())
+        del result['Plaintext']
+
+        return json.dumps(result)


 def _assert_valid_key_id(key_id):
     if not re.match(r'^[A-F0-9]{8}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{12}$', key_id, re.IGNORECASE):
-        raise JSONResponseError(404, 'Not Found', body={
-            'message': ' Invalid keyId', '__type': 'NotFoundException'})
+        raise NotFoundException('Invalid keyId')


 def _assert_default_policy(policy_name):
     if policy_name != 'default':
-        raise JSONResponseError(404, 'Not Found', body={
-            'message': "No such policy exists",
-            '__type': 'NotFoundException'})
+        raise NotFoundException("No such policy exists")
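The validation above boils down to: the key (or alias) must exist, and callers must pass exactly one of KeySpec or NumberOfBytes, with NumberOfBytes capped at 1024. A hedged client-side sketch:

    import boto3
    from moto import mock_kms

    @mock_kms
    def test_generate_data_key():
        client = boto3.client('kms', region_name='us-east-1')
        key_id = client.create_key()['KeyMetadata']['KeyId']
        resp = client.generate_data_key(KeyId=key_id, KeySpec='AES_256')
        assert len(resp['Plaintext']) == 32      # AES_256 -> 32 random bytes
        assert resp['KeyId'].startswith('arn:')  # always the key ARN, never the alias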
@ -242,7 +242,8 @@ class LogsBackend(BaseBackend):
         if next_token is None:
             next_token = 0

-        groups = sorted(group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix))
+        groups = [group.to_describe_dict() for name, group in self.groups.items() if name.startswith(log_group_name_prefix)]
+        groups = sorted(groups, key=lambda x: x['creationTime'], reverse=True)
        groups_page = groups[next_token:next_token + limit]

         next_token += limit
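The effect is that describe_log_groups now pages through groups sorted by creationTime, newest first, instead of the previous sort over the raw dicts; roughly:

    import boto3
    from moto import mock_logs

    @mock_logs
    def test_describe_log_groups_sorted():
        client = boto3.client('logs', region_name='us-east-1')
        for name in ('group-a', 'group-b'):
            client.create_log_group(logGroupName=name)
        groups = client.describe_log_groups(logGroupNamePrefix='group')['logGroups']
        times = [g['creationTime'] for g in groups]
        assert times == sorted(times, reverse=True)  # newest first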
@ -29,7 +29,6 @@ import re
 from .compat import BaseClass
 from .utils import decode_utf8

-
 STATUSES = {
     100: "Continue",
     101: "Switching Protocols",
@ -24,7 +24,7 @@ class HealthCheck(BaseModel):
         self.id = health_check_id
         self.ip_address = health_check_args.get("ip_address")
         self.port = health_check_args.get("port", 80)
-        self._type = health_check_args.get("type")
+        self.type_ = health_check_args.get("type")
         self.resource_path = health_check_args.get("resource_path")
         self.fqdn = health_check_args.get("fqdn")
         self.search_string = health_check_args.get("search_string")

@ -58,7 +58,7 @@ class HealthCheck(BaseModel):
             <HealthCheckConfig>
                 <IPAddress>{{ health_check.ip_address }}</IPAddress>
                 <Port>{{ health_check.port }}</Port>
-                <Type>{{ health_check._type }}</Type>
+                <Type>{{ health_check.type_ }}</Type>
                 <ResourcePath>{{ health_check.resource_path }}</ResourcePath>
                 <FullyQualifiedDomainName>{{ health_check.fqdn }}</FullyQualifiedDomainName>
                 <RequestInterval>{{ health_check.request_interval }}</RequestInterval>

@ -76,7 +76,7 @@ class RecordSet(BaseModel):

     def __init__(self, kwargs):
         self.name = kwargs.get('Name')
-        self._type = kwargs.get('Type')
+        self.type_ = kwargs.get('Type')
         self.ttl = kwargs.get('TTL')
         self.records = kwargs.get('ResourceRecords', [])
         self.set_identifier = kwargs.get('SetIdentifier')

@ -130,7 +130,7 @@ class RecordSet(BaseModel):
     def to_xml(self):
         template = Template("""<ResourceRecordSet>
                 <Name>{{ record_set.name }}</Name>
-                <Type>{{ record_set._type }}</Type>
+                <Type>{{ record_set.type_ }}</Type>
                 {% if record_set.set_identifier %}
                     <SetIdentifier>{{ record_set.set_identifier }}</SetIdentifier>
                 {% endif %}

@ -183,7 +183,7 @@ class FakeZone(BaseModel):
     def upsert_rrset(self, record_set):
         new_rrset = RecordSet(record_set)
         for i, rrset in enumerate(self.rrsets):
-            if rrset.name == new_rrset.name:
+            if rrset.name == new_rrset.name and rrset.type_ == new_rrset.type_:
                 self.rrsets[i] = new_rrset
                 break
         else:

@ -202,7 +202,7 @@ class FakeZone(BaseModel):
         record_sets = list(self.rrsets)  # Copy the list
         if start_type:
             record_sets = [
-                record_set for record_set in record_sets if record_set._type >= start_type]
+                record_set for record_set in record_sets if record_set.type_ >= start_type]
         if start_name:
             record_sets = [
                 record_set for record_set in record_sets if record_set.name >= start_name]
@ -123,6 +123,9 @@ class Route53(BaseResponse):
                         """ % (record_set['Name'], the_zone.name)
                         return 400, headers, error_msg

+                    if not record_set['Name'].endswith('.'):
+                        record_set['Name'] += '.'
+
                     if action in ('CREATE', 'UPSERT'):
                         if 'ResourceRecords' in record_set:
                             resource_records = list(
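Together, these Route 53 changes mean names are normalized to the trailing-dot form and an UPSERT only replaces a record set when both name and type match. A sketch (zone and record values are illustrative):

    import boto3
    from moto import mock_route53

    @mock_route53
    def test_upsert_keys_on_name_and_type():
        client = boto3.client('route53', region_name='us-east-1')
        zone_id = client.create_hosted_zone(
            Name='example.com', CallerReference='ref')['HostedZone']['Id']
        for rtype, value in (('A', '10.0.0.1'), ('TXT', '"hello"')):
            client.change_resource_record_sets(HostedZoneId=zone_id, ChangeBatch={
                'Changes': [{'Action': 'UPSERT', 'ResourceRecordSet': {
                    'Name': 'db.example.com',  # stored as 'db.example.com.'
                    'Type': rtype, 'TTL': 60,
                    'ResourceRecords': [{'Value': value}]}}]})
        # The A and TXT record sets now coexist instead of clobbering each other.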
@ -178,3 +178,24 @@ class InvalidStorageClass(S3ClientError):
             "InvalidStorageClass",
             "The storage class you specified is not valid",
             *args, **kwargs)
+
+
+class InvalidBucketName(S3ClientError):
+    code = 400
+
+    def __init__(self, *args, **kwargs):
+        super(InvalidBucketName, self).__init__(
+            "InvalidBucketName",
+            "The specified bucket is not valid.",
+            *args, **kwargs
+        )
+
+
+class DuplicateTagKeys(S3ClientError):
+    code = 400
+
+    def __init__(self, *args, **kwargs):
+        super(DuplicateTagKeys, self).__init__(
+            "InvalidTag",
+            "Cannot provide multiple Tags with the same key",
+            *args, **kwargs)
@ -8,19 +8,26 @@ import itertools
 import codecs
 import random
 import string
+import tempfile
+import sys
+import uuid

 import six

 from bisect import insort
 from moto.core import BaseBackend, BaseModel
 from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
-from .exceptions import BucketAlreadyExists, MissingBucket, InvalidPart, EntityTooSmall, MissingKey, \
-    InvalidNotificationDestination, MalformedXML, InvalidStorageClass
+from .exceptions import BucketAlreadyExists, MissingBucket, InvalidBucketName, InvalidPart, \
+    EntityTooSmall, MissingKey, InvalidNotificationDestination, MalformedXML, InvalidStorageClass, DuplicateTagKeys
 from .utils import clean_key_name, _VersionedKeyStore

+MAX_BUCKET_NAME_LENGTH = 63
+MIN_BUCKET_NAME_LENGTH = 3
 UPLOAD_ID_BYTES = 43
 UPLOAD_PART_MIN_SIZE = 5242880
 STORAGE_CLASS = ["STANDARD", "REDUCED_REDUNDANCY", "STANDARD_IA", "ONEZONE_IA"]
+DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024
+DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()


 class FakeDeleteMarker(BaseModel):
@ -29,7 +36,7 @@ class FakeDeleteMarker(BaseModel):
         self.key = key
         self.name = key.name
         self.last_modified = datetime.datetime.utcnow()
-        self._version_id = key.version_id + 1
+        self._version_id = str(uuid.uuid4())

     @property
     def last_modified_ISO8601(self):
@ -42,9 +49,9 @@ class FakeDeleteMarker(BaseModel):

 class FakeKey(BaseModel):

-    def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0):
+    def __init__(self, name, value, storage="STANDARD", etag=None, is_versioned=False, version_id=0,
+                 max_buffer_size=DEFAULT_KEY_BUFFER_SIZE):
         self.name = name
-        self.value = value
         self.last_modified = datetime.datetime.utcnow()
         self.acl = get_canned_acl('private')
         self.website_redirect_location = None
@ -56,14 +63,37 @@ class FakeKey(BaseModel):
         self._is_versioned = is_versioned
         self._tagging = FakeTagging()

+        self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size)
+        self._max_buffer_size = max_buffer_size
+        self.value = value
+
     @property
     def version_id(self):
         return self._version_id

-    def copy(self, new_name=None):
+    @property
+    def value(self):
+        self._value_buffer.seek(0)
+        return self._value_buffer.read()
+
+    @value.setter
+    def value(self, new_value):
+        self._value_buffer.seek(0)
+        self._value_buffer.truncate()
+
+        # Hack for working around moto's own unit tests; this probably won't
+        # actually get hit in normal use.
+        if isinstance(new_value, six.text_type):
+            new_value = new_value.encode(DEFAULT_TEXT_ENCODING)
+        self._value_buffer.write(new_value)
+
+    def copy(self, new_name=None, new_is_versioned=None):
         r = copy.deepcopy(self)
         if new_name is not None:
             r.name = new_name
+        if new_is_versioned is not None:
+            r._is_versioned = new_is_versioned
+            r.refresh_version()
         return r

     def set_metadata(self, metadata, replace=False):
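SpooledTemporaryFile keeps small payloads in memory and transparently rolls over to a real temporary file once max_size is exceeded, which is why large keys no longer pin their whole body in RAM. The behavior in isolation:

    import tempfile

    buf = tempfile.SpooledTemporaryFile(max_size=1024)
    buf.write(b'x' * 100)   # still buffered in memory
    buf.write(b'y' * 2000)  # crossing max_size spools the data to disk
    buf.seek(0)
    assert len(buf.read()) == 2100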
@ -83,29 +113,34 @@ class FakeKey(BaseModel):
         self.acl = acl

     def append_to_value(self, value):
-        self.value += value
+        self._value_buffer.seek(0, os.SEEK_END)
+        self._value_buffer.write(value)
+
         self.last_modified = datetime.datetime.utcnow()
         self._etag = None  # must recalculate etag
         if self._is_versioned:
-            self._version_id += 1
+            self._version_id = str(uuid.uuid4())
         else:
-            self._is_versioned = 0
+            self._version_id = None

     def restore(self, days):
         self._expiry = datetime.datetime.utcnow() + datetime.timedelta(days)

-    def increment_version(self):
-        self._version_id += 1
+    def refresh_version(self):
+        self._version_id = str(uuid.uuid4())
+        self.last_modified = datetime.datetime.utcnow()

     @property
     def etag(self):
         if self._etag is None:
             value_md5 = hashlib.md5()
-            if isinstance(self.value, six.text_type):
-                value = self.value.encode("utf-8")
-            else:
-                value = self.value
-            value_md5.update(value)
+            self._value_buffer.seek(0)
+            while True:
+                block = self._value_buffer.read(DEFAULT_KEY_BUFFER_SIZE)
+                if not block:
+                    break
+                value_md5.update(block)

             self._etag = value_md5.hexdigest()
         return '"{0}"'.format(self._etag)
@ -132,7 +167,7 @@ class FakeKey(BaseModel):
         res = {
             'ETag': self.etag,
             'last-modified': self.last_modified_RFC1123,
-            'content-length': str(len(self.value)),
+            'content-length': str(self.size),
         }
         if self._storage_class != 'STANDARD':
             res['x-amz-storage-class'] = self._storage_class
@ -150,7 +185,8 @@ class FakeKey(BaseModel):

     @property
     def size(self):
-        return len(self.value)
+        self._value_buffer.seek(0, os.SEEK_END)
+        return self._value_buffer.tell()

     @property
     def storage_class(self):
@ -161,6 +197,26 @@ class FakeKey(BaseModel):
         if self._expiry is not None:
             return self._expiry.strftime("%a, %d %b %Y %H:%M:%S GMT")

+    # Keys need to be pickleable due to some implementation details of boto3.
+    # Since file objects aren't pickleable, we need to override the default
+    # behavior. The following is adapted from the Python docs:
+    # https://docs.python.org/3/library/pickle.html#handling-stateful-objects
+    def __getstate__(self):
+        state = self.__dict__.copy()
+        state['value'] = self.value
+        del state['_value_buffer']
+        return state
+
+    def __setstate__(self, state):
+        self.__dict__.update({
+            k: v for k, v in six.iteritems(state)
+            if k != 'value'
+        })
+
+        self._value_buffer = \
+            tempfile.SpooledTemporaryFile(max_size=self._max_buffer_size)
+        self.value = state['value']
+

 class FakeMultipart(BaseModel):
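The same pattern in miniature, showing why the override is needed and that the round trip preserves the payload (Blob here is a toy stand-in, not moto code):

    import pickle
    import tempfile

    class Blob(object):
        def __init__(self, value):
            self._buffer = tempfile.SpooledTemporaryFile(max_size=1024)
            self._buffer.write(value)

        @property
        def value(self):
            self._buffer.seek(0)
            return self._buffer.read()

        def __getstate__(self):
            state = self.__dict__.copy()
            state['value'] = self.value  # materialize the bytes...
            del state['_buffer']         # ...because file handles can't pickle
            return state

        def __setstate__(self, state):
            self.__init__(state['value'])

    assert pickle.loads(pickle.dumps(Blob(b'hello'))).value == b'hello'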
@ -634,6 +690,8 @@ class S3Backend(BaseBackend):
     def create_bucket(self, bucket_name, region_name):
         if bucket_name in self.buckets:
             raise BucketAlreadyExists(bucket=bucket_name)
+        if not MIN_BUCKET_NAME_LENGTH <= len(bucket_name) <= MAX_BUCKET_NAME_LENGTH:
+            raise InvalidBucketName()
         new_bucket = FakeBucket(name=bucket_name, region_name=region_name)
         self.buckets[bucket_name] = new_bucket
         return new_bucket
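Client-side, the new check surfaces as an InvalidBucketName error (botocore itself accepts short names, so the request reaches the backend); a sketch:

    import boto3
    from botocore.exceptions import ClientError
    from moto import mock_s3

    @mock_s3
    def test_bucket_name_length_is_validated():
        client = boto3.client('s3', region_name='us-east-1')
        try:
            client.create_bucket(Bucket='ab')  # below the 3-character minimum
        except ClientError as err:
            assert err.response['Error']['Code'] == 'InvalidBucketName'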
@ -663,17 +721,18 @@ class S3Backend(BaseBackend):

     def get_bucket_latest_versions(self, bucket_name):
         versions = self.get_bucket_versions(bucket_name)
-        maximum_version_per_key = {}
+        latest_modified_per_key = {}
         latest_versions = {}

         for version in versions:
             name = version.name
+            last_modified = version.last_modified
             version_id = version.version_id
-            maximum_version_per_key[name] = max(
-                version_id,
-                maximum_version_per_key.get(name, -1)
+            latest_modified_per_key[name] = max(
+                last_modified,
+                latest_modified_per_key.get(name, datetime.datetime.min)
             )
-            if version_id == maximum_version_per_key[name]:
+            if last_modified == latest_modified_per_key[name]:
                 latest_versions[name] = version_id

         return latest_versions
@ -721,20 +780,19 @@ class S3Backend(BaseBackend):

         bucket = self.get_bucket(bucket_name)

-        old_key = bucket.keys.get(key_name, None)
-        if old_key is not None and bucket.is_versioned:
-            new_version_id = old_key._version_id + 1
-        else:
-            new_version_id = 0
-
         new_key = FakeKey(
             name=key_name,
             value=value,
             storage=storage,
             etag=etag,
             is_versioned=bucket.is_versioned,
-            version_id=new_version_id)
-        bucket.keys[key_name] = new_key
+            version_id=str(uuid.uuid4()) if bucket.is_versioned else None)
+
+        keys = [
+            key for key in bucket.keys.getlist(key_name, [])
+            if key.version_id != new_key.version_id
+        ] + [new_key]
+        bucket.keys.setlist(key_name, keys)

         return new_key
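Through boto3 under moto, this change means each put on a versioned bucket stores a separate key with its own random version id rather than overwriting a counter. A sketch, with hypothetical bucket and key names:

import boto3
from moto import mock_s3


@mock_s3
def demo_versioned_puts():
    client = boto3.client('s3', region_name='us-east-1')
    client.create_bucket(Bucket='demo-bucket')
    client.put_bucket_versioning(
        Bucket='demo-bucket',
        VersioningConfiguration={'Status': 'Enabled'},
    )

    client.put_object(Bucket='demo-bucket', Key='the-key', Body=b'v1')
    client.put_object(Bucket='demo-bucket', Key='the-key', Body=b'v2')

    # Both versions are retained, each under a distinct UUID version id.
    versions = client.list_object_versions(Bucket='demo-bucket')['Versions']
    assert len({v['VersionId'] for v in versions}) == 2


demo_versioned_puts()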
@ -773,6 +831,9 @@ class S3Backend(BaseBackend):
         return key

     def put_bucket_tagging(self, bucket_name, tagging):
+        tag_keys = [tag.key for tag in tagging.tag_set.tags]
+        if len(tag_keys) != len(set(tag_keys)):
+            raise DuplicateTagKeys()
         bucket = self.get_bucket(bucket_name)
         bucket.set_tags(tagging)

@ -915,17 +976,15 @@ class S3Backend(BaseBackend):
         dest_bucket = self.get_bucket(dest_bucket_name)
         key = self.get_key(src_bucket_name, src_key_name,
                            version_id=src_version_id)
-        if dest_key_name != src_key_name:
-            key = key.copy(dest_key_name)
-        dest_bucket.keys[dest_key_name] = key

-        # By this point, the destination key must exist, or KeyError
-        if dest_bucket.is_versioned:
-            dest_bucket.keys[dest_key_name].increment_version()
+        new_key = key.copy(dest_key_name, dest_bucket.is_versioned)
         if storage is not None:
-            key.set_storage_class(storage)
+            new_key.set_storage_class(storage)
         if acl is not None:
-            key.set_acl(acl)
+            new_key.set_acl(acl)
+
+        dest_bucket.keys[dest_key_name] = new_key

     def set_bucket_acl(self, bucket_name, acl):
         bucket = self.get_bucket(bucket_name)
@ -19,7 +19,7 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi
     MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent
 from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
     FakeTag
-from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url
+from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, parse_region_from_url
 from xml.dom import minidom


@ -193,7 +193,13 @@ class ResponseObject(_TemplateEnvironmentMixin):
         elif 'location' in querystring:
             bucket = self.backend.get_bucket(bucket_name)
             template = self.response_template(S3_BUCKET_LOCATION)
-            return template.render(location=bucket.location)
+
+            location = bucket.location
+            # us-east-1 is different - returns a None location
+            if location == DEFAULT_REGION_NAME:
+                location = None
+
+            return template.render(location=location)
         elif 'lifecycle' in querystring:
             bucket = self.backend.get_bucket(bucket_name)
             if not bucket.rules:
@ -338,9 +344,15 @@ class ResponseObject(_TemplateEnvironmentMixin):

         if continuation_token or start_after:
             limit = continuation_token or start_after
+            if not delimiter:
                 result_keys = self._get_results_from_token(result_keys, limit)
+            else:
+                result_folders = self._get_results_from_token(result_folders, limit)

+        if not delimiter:
             result_keys, is_truncated, next_continuation_token = self._truncate_result(result_keys, max_keys)
+        else:
+            result_folders, is_truncated, next_continuation_token = self._truncate_result(result_folders, max_keys)

         return template.render(
             bucket=bucket,
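The branch on `delimiter` exists because a delimited listing returns common prefixes (plain strings) rather than key objects, so pagination must token-compare and truncate whichever collection is actually being returned. A sketch of the pagination this enables, assuming moto's list_objects_v2 plumbing behaves as the hunk implies (names hypothetical):

import boto3
from moto import mock_s3


@mock_s3
def demo_paginated_prefixes():
    client = boto3.client('s3', region_name='us-east-1')
    client.create_bucket(Bucket='demo-bucket')
    for folder in ('a', 'b', 'c'):
        client.put_object(Bucket='demo-bucket', Key=folder + '/file.txt', Body=b'x')

    # Page through the three folders two CommonPrefixes at a time.
    page1 = client.list_objects_v2(Bucket='demo-bucket', Delimiter='/', MaxKeys=2)
    page2 = client.list_objects_v2(
        Bucket='demo-bucket', Delimiter='/',
        ContinuationToken=page1['NextContinuationToken'])
    prefixes = [p['Prefix'] for p in page1['CommonPrefixes'] + page2['CommonPrefixes']]
    assert prefixes == ['a/', 'b/', 'c/']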
@ -358,7 +370,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
     def _get_results_from_token(self, result_keys, token):
         continuation_index = 0
         for key in result_keys:
-            if key.name > token:
+            if (key.name if isinstance(key, FakeKey) else key) > token:
                 break
             continuation_index += 1
         return result_keys[continuation_index:]
@ -367,7 +379,8 @@ class ResponseObject(_TemplateEnvironmentMixin):
         if len(result_keys) > max_keys:
             is_truncated = 'true'
             result_keys = result_keys[:max_keys]
-            next_continuation_token = result_keys[-1].name
+            item = result_keys[-1]
+            next_continuation_token = (item.name if isinstance(item, FakeKey) else item)
         else:
             is_truncated = 'false'
             next_continuation_token = None
@ -432,8 +445,19 @@ class ResponseObject(_TemplateEnvironmentMixin):

         else:
             if body:
+                # us-east-1, the default AWS region behaves a bit differently
+                # - you should not use it as a location constraint --> it fails
+                # - querying the location constraint returns None
                 try:
-                    region_name = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint']
+                    forced_region = xmltodict.parse(body)['CreateBucketConfiguration']['LocationConstraint']
+
+                    if forced_region == DEFAULT_REGION_NAME:
+                        raise S3ClientError(
+                            'InvalidLocationConstraint',
+                            'The specified location-constraint is not valid'
+                        )
+                    else:
+                        region_name = forced_region
                 except KeyError:
                     pass
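Taken together, the location hunks reproduce S3's special-casing of us-east-1: you may not name it as an explicit location constraint, and querying the location of a bucket that lives there comes back empty. A sketch of the round trip (bucket name hypothetical):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3


@mock_s3
def demo_us_east_1_location():
    client = boto3.client('s3', region_name='us-east-1')

    # An explicit us-east-1 constraint is rejected...
    try:
        client.create_bucket(
            Bucket='demo-bucket',
            CreateBucketConfiguration={'LocationConstraint': 'us-east-1'})
    except ClientError as err:
        assert err.response['Error']['Code'] == 'InvalidLocationConstraint'

    # ...while omitting it creates the bucket there, and the location
    # query comes back empty (boto3 surfaces this as None or '').
    client.create_bucket(Bucket='demo-bucket')
    assert not client.get_bucket_location(Bucket='demo-bucket')['LocationConstraint']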
@ -709,7 +733,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
         # Copy key
         # you can have a quoted ?version=abc with a version Id, so work on
         # we need to parse the unquoted string first
-        src_key = request.headers.get("x-amz-copy-source")
+        src_key = clean_key_name(request.headers.get("x-amz-copy-source"))
         if isinstance(src_key, six.binary_type):
             src_key = src_key.decode('utf-8')
         src_key_parsed = urlparse(src_key)
@ -1176,7 +1200,7 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """<?xml version="1.0" encoding="UTF-8"?>
 </Error>"""

 S3_BUCKET_LOCATION = """<?xml version="1.0" encoding="UTF-8"?>
-<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{{ location }}</LocationConstraint>"""
+<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{% if location != None %}{{ location }}{% endif %}</LocationConstraint>"""

 S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
 <LifecycleConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
@ -1279,7 +1303,7 @@ S3_BUCKET_GET_VERSIONS = """<?xml version="1.0" encoding="UTF-8"?>
     {% for key in key_list %}
     <Version>
         <Key>{{ key.name }}</Key>
-        <VersionId>{{ key.version_id }}</VersionId>
+        <VersionId>{% if key.version_id is none %}null{% else %}{{ key.version_id }}{% endif %}</VersionId>
         <IsLatest>{% if latest_versions[key.name] == key.version_id %}true{% else %}false{% endif %}</IsLatest>
         <LastModified>{{ key.last_modified_ISO8601 }}</LastModified>
         <ETag>{{ key.etag }}</ETag>
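The template change mirrors real S3, where objects written without versioning enabled carry the literal version id "null". A sketch of the expected listing (bucket and key hypothetical):

import boto3
from moto import mock_s3


@mock_s3
def demo_null_version_id():
    client = boto3.client('s3', region_name='us-east-1')
    client.create_bucket(Bucket='demo-bucket')
    client.put_object(Bucket='demo-bucket', Key='the-key', Body=b'data')

    # Versioning was never enabled, so the key's version id is None
    # internally and is reported as the string "null", as on real S3.
    versions = client.list_object_versions(Bucket='demo-bucket')['Versions']
    assert versions[0]['VersionId'] == 'null'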
@ -27,3 +27,10 @@ class InvalidParameterException(SecretsManagerClientError):
         super(InvalidParameterException, self).__init__(
             'InvalidParameterException',
             message)
+
+
+class InvalidRequestException(SecretsManagerClientError):
+    def __init__(self, message):
+        super(InvalidRequestException, self).__init__(
+            'InvalidRequestException',
+            message)
@ -2,6 +2,8 @@ from __future__ import unicode_literals

 import time
 import json
+import uuid
+import datetime

 import boto3

@ -9,6 +11,7 @@ from moto.core import BaseBackend, BaseModel
 from .exceptions import (
     ResourceNotFoundException,
     InvalidParameterException,
+    InvalidRequestException,
     ClientError
 )
 from .utils import random_password, secret_arn
@ -18,10 +21,6 @@ class SecretsManager(BaseModel):

     def __init__(self, region_name, **kwargs):
         self.region = region_name
-        self.secret_id = kwargs.get('secret_id', '')
-        self.version_id = kwargs.get('version_id', '')
-        self.version_stage = kwargs.get('version_stage', '')
-        self.secret_string = ''


 class SecretsManagerBackend(BaseBackend):
@ -29,14 +28,7 @@ class SecretsManagerBackend(BaseBackend):
     def __init__(self, region_name=None, **kwargs):
         super(SecretsManagerBackend, self).__init__()
         self.region = region_name
-        self.secret_id = kwargs.get('secret_id', '')
-        self.name = kwargs.get('name', '')
-        self.createdate = int(time.time())
-        self.secret_string = ''
-        self.rotation_enabled = False
-        self.rotation_lambda_arn = ''
-        self.auto_rotate_after_days = 0
-        self.version_id = ''
+        self.secrets = {}

     def reset(self):
         region_name = self.region
|
|||||||
self.__init__(region_name)
|
self.__init__(region_name)
|
||||||
|
|
||||||
def _is_valid_identifier(self, identifier):
|
def _is_valid_identifier(self, identifier):
|
||||||
return identifier in (self.name, self.secret_id)
|
return identifier in self.secrets
|
||||||
|
|
||||||
|
def _unix_time_secs(self, dt):
|
||||||
|
epoch = datetime.datetime.utcfromtimestamp(0)
|
||||||
|
return (dt - epoch).total_seconds()
|
||||||
|
|
||||||
def get_secret_value(self, secret_id, version_id, version_stage):
|
def get_secret_value(self, secret_id, version_id, version_stage):
|
||||||
|
|
||||||
if not self._is_valid_identifier(secret_id):
|
if not self._is_valid_identifier(secret_id):
|
||||||
raise ResourceNotFoundException()
|
raise ResourceNotFoundException()
|
||||||
|
|
||||||
|
if 'deleted_date' in self.secrets[secret_id]:
|
||||||
|
raise InvalidRequestException(
|
||||||
|
"An error occurred (InvalidRequestException) when calling the GetSecretValue operation: You tried to \
|
||||||
|
perform the operation on a secret that's currently marked deleted."
|
||||||
|
)
|
||||||
|
|
||||||
|
secret = self.secrets[secret_id]
|
||||||
|
|
||||||
response = json.dumps({
|
response = json.dumps({
|
||||||
"ARN": secret_arn(self.region, self.secret_id),
|
"ARN": secret_arn(self.region, secret['secret_id']),
|
||||||
"Name": self.name,
|
"Name": secret['name'],
|
||||||
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
|
"VersionId": secret['version_id'],
|
||||||
"SecretString": self.secret_string,
|
"SecretString": secret['secret_string'],
|
||||||
"VersionStages": [
|
"VersionStages": [
|
||||||
"AWSCURRENT",
|
"AWSCURRENT",
|
||||||
],
|
],
|
||||||
"CreatedDate": "2018-05-23 13:16:57.198000"
|
"CreatedDate": secret['createdate']
|
||||||
})
|
})
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def create_secret(self, name, secret_string, **kwargs):
|
def create_secret(self, name, secret_string, tags, **kwargs):
|
||||||
|
|
||||||
self.secret_string = secret_string
|
generated_version_id = str(uuid.uuid4())
|
||||||
self.secret_id = name
|
|
||||||
self.name = name
|
secret = {
|
||||||
|
'secret_string': secret_string,
|
||||||
|
'secret_id': name,
|
||||||
|
'name': name,
|
||||||
|
'createdate': int(time.time()),
|
||||||
|
'rotation_enabled': False,
|
||||||
|
'rotation_lambda_arn': '',
|
||||||
|
'auto_rotate_after_days': 0,
|
||||||
|
'version_id': generated_version_id,
|
||||||
|
'tags': tags
|
||||||
|
}
|
||||||
|
|
||||||
|
self.secrets[name] = secret
|
||||||
|
|
||||||
response = json.dumps({
|
response = json.dumps({
|
||||||
"ARN": secret_arn(self.region, name),
|
"ARN": secret_arn(self.region, name),
|
||||||
"Name": self.name,
|
"Name": name,
|
||||||
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
|
"VersionId": generated_version_id,
|
||||||
})
|
})
|
||||||
|
|
||||||
return response
|
return response
|
||||||
@ -82,26 +98,23 @@ class SecretsManagerBackend(BaseBackend):
|
|||||||
if not self._is_valid_identifier(secret_id):
|
if not self._is_valid_identifier(secret_id):
|
||||||
raise ResourceNotFoundException
|
raise ResourceNotFoundException
|
||||||
|
|
||||||
|
secret = self.secrets[secret_id]
|
||||||
|
|
||||||
response = json.dumps({
|
response = json.dumps({
|
||||||
"ARN": secret_arn(self.region, self.secret_id),
|
"ARN": secret_arn(self.region, secret['secret_id']),
|
||||||
"Name": self.name,
|
"Name": secret['name'],
|
||||||
"Description": "",
|
"Description": "",
|
||||||
"KmsKeyId": "",
|
"KmsKeyId": "",
|
||||||
"RotationEnabled": self.rotation_enabled,
|
"RotationEnabled": secret['rotation_enabled'],
|
||||||
"RotationLambdaARN": self.rotation_lambda_arn,
|
"RotationLambdaARN": secret['rotation_lambda_arn'],
|
||||||
"RotationRules": {
|
"RotationRules": {
|
||||||
"AutomaticallyAfterDays": self.auto_rotate_after_days
|
"AutomaticallyAfterDays": secret['auto_rotate_after_days']
|
||||||
},
|
},
|
||||||
"LastRotatedDate": None,
|
"LastRotatedDate": None,
|
||||||
"LastChangedDate": None,
|
"LastChangedDate": None,
|
||||||
"LastAccessedDate": None,
|
"LastAccessedDate": None,
|
||||||
"DeletedDate": None,
|
"DeletedDate": secret.get('deleted_date', None),
|
||||||
"Tags": [
|
"Tags": secret['tags']
|
||||||
{
|
|
||||||
"Key": "",
|
|
||||||
"Value": ""
|
|
||||||
},
|
|
||||||
]
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return response
|
return response
|
||||||
@ -114,6 +127,12 @@ class SecretsManagerBackend(BaseBackend):
|
|||||||
if not self._is_valid_identifier(secret_id):
|
if not self._is_valid_identifier(secret_id):
|
||||||
raise ResourceNotFoundException
|
raise ResourceNotFoundException
|
||||||
|
|
||||||
|
if 'deleted_date' in self.secrets[secret_id]:
|
||||||
|
raise InvalidRequestException(
|
||||||
|
"An error occurred (InvalidRequestException) when calling the RotateSecret operation: You tried to \
|
||||||
|
perform the operation on a secret that's currently marked deleted."
|
||||||
|
)
|
||||||
|
|
||||||
if client_request_token:
|
if client_request_token:
|
||||||
token_length = len(client_request_token)
|
token_length = len(client_request_token)
|
||||||
if token_length < 32 or token_length > 64:
|
if token_length < 32 or token_length > 64:
|
||||||
@ -141,17 +160,19 @@ class SecretsManagerBackend(BaseBackend):
|
|||||||
)
|
)
|
||||||
raise InvalidParameterException(msg)
|
raise InvalidParameterException(msg)
|
||||||
|
|
||||||
self.version_id = client_request_token or ''
|
secret = self.secrets[secret_id]
|
||||||
self.rotation_lambda_arn = rotation_lambda_arn or ''
|
|
||||||
|
secret['version_id'] = client_request_token or ''
|
||||||
|
secret['rotation_lambda_arn'] = rotation_lambda_arn or ''
|
||||||
if rotation_rules:
|
if rotation_rules:
|
||||||
self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0)
|
secret['auto_rotate_after_days'] = rotation_rules.get(rotation_days, 0)
|
||||||
if self.auto_rotate_after_days > 0:
|
if secret['auto_rotate_after_days'] > 0:
|
||||||
self.rotation_enabled = True
|
secret['rotation_enabled'] = True
|
||||||
|
|
||||||
response = json.dumps({
|
response = json.dumps({
|
||||||
"ARN": secret_arn(self.region, self.secret_id),
|
"ARN": secret_arn(self.region, secret['secret_id']),
|
||||||
"Name": self.name,
|
"Name": secret['name'],
|
||||||
"VersionId": self.version_id
|
"VersionId": secret['version_id']
|
||||||
})
|
})
|
||||||
|
|
||||||
return response
|
return response
|
||||||
@ -185,6 +206,85 @@ class SecretsManagerBackend(BaseBackend):
|
|||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
def list_secrets(self, max_results, next_token):
|
||||||
|
# TODO implement pagination and limits
|
||||||
|
|
||||||
|
secret_list = [{
|
||||||
|
"ARN": secret_arn(self.region, secret['secret_id']),
|
||||||
|
"DeletedDate": secret.get('deleted_date', None),
|
||||||
|
"Description": "",
|
||||||
|
"KmsKeyId": "",
|
||||||
|
"LastAccessedDate": None,
|
||||||
|
"LastChangedDate": None,
|
||||||
|
"LastRotatedDate": None,
|
||||||
|
"Name": secret['name'],
|
||||||
|
"RotationEnabled": secret['rotation_enabled'],
|
||||||
|
"RotationLambdaARN": secret['rotation_lambda_arn'],
|
||||||
|
"RotationRules": {
|
||||||
|
"AutomaticallyAfterDays": secret['auto_rotate_after_days']
|
||||||
|
},
|
||||||
|
"SecretVersionsToStages": {
|
||||||
|
secret['version_id']: ["AWSCURRENT"]
|
||||||
|
},
|
||||||
|
"Tags": secret['tags']
|
||||||
|
} for secret in self.secrets.values()]
|
||||||
|
|
||||||
|
return secret_list, None
|
||||||
|
|
||||||
|
def delete_secret(self, secret_id, recovery_window_in_days, force_delete_without_recovery):
|
||||||
|
|
||||||
|
if not self._is_valid_identifier(secret_id):
|
||||||
|
raise ResourceNotFoundException
|
||||||
|
|
||||||
|
if 'deleted_date' in self.secrets[secret_id]:
|
||||||
|
raise InvalidRequestException(
|
||||||
|
"An error occurred (InvalidRequestException) when calling the DeleteSecret operation: You tried to \
|
||||||
|
perform the operation on a secret that's currently marked deleted."
|
||||||
|
)
|
||||||
|
|
||||||
|
if recovery_window_in_days and force_delete_without_recovery:
|
||||||
|
raise InvalidParameterException(
|
||||||
|
"An error occurred (InvalidParameterException) when calling the DeleteSecret operation: You can't \
|
||||||
|
use ForceDeleteWithoutRecovery in conjunction with RecoveryWindowInDays."
|
||||||
|
)
|
||||||
|
|
||||||
|
if recovery_window_in_days and (recovery_window_in_days < 7 or recovery_window_in_days > 30):
|
||||||
|
raise InvalidParameterException(
|
||||||
|
"An error occurred (InvalidParameterException) when calling the DeleteSecret operation: The \
|
||||||
|
RecoveryWindowInDays value must be between 7 and 30 days (inclusive)."
|
||||||
|
)
|
||||||
|
|
||||||
|
deletion_date = datetime.datetime.utcnow()
|
||||||
|
|
||||||
|
if force_delete_without_recovery:
|
||||||
|
secret = self.secrets.pop(secret_id, None)
|
||||||
|
else:
|
||||||
|
deletion_date += datetime.timedelta(days=recovery_window_in_days or 30)
|
||||||
|
self.secrets[secret_id]['deleted_date'] = self._unix_time_secs(deletion_date)
|
||||||
|
secret = self.secrets.get(secret_id, None)
|
||||||
|
|
||||||
|
if not secret:
|
||||||
|
raise ResourceNotFoundException
|
||||||
|
|
||||||
|
arn = secret_arn(self.region, secret['secret_id'])
|
||||||
|
name = secret['name']
|
||||||
|
|
||||||
|
return arn, name, self._unix_time_secs(deletion_date)
|
||||||
|
|
||||||
|
def restore_secret(self, secret_id):
|
||||||
|
|
||||||
|
if not self._is_valid_identifier(secret_id):
|
||||||
|
raise ResourceNotFoundException
|
||||||
|
|
||||||
|
self.secrets[secret_id].pop('deleted_date', None)
|
||||||
|
|
||||||
|
secret = self.secrets[secret_id]
|
||||||
|
|
||||||
|
arn = secret_arn(self.region, secret['secret_id'])
|
||||||
|
name = secret['name']
|
||||||
|
|
||||||
|
return arn, name
|
||||||
|
|
||||||
|
|
||||||
available_regions = (
|
available_regions = (
|
||||||
boto3.session.Session().get_available_regions("secretsmanager")
|
boto3.session.Session().get_available_regions("secretsmanager")
|
||||||
|
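A sketch of the soft-delete and restore round trip the new backend methods support, as exercised through boto3 (secret name and value hypothetical):

import boto3
from moto import mock_secretsmanager


@mock_secretsmanager
def demo_delete_and_restore():
    client = boto3.client('secretsmanager', region_name='us-west-2')
    client.create_secret(Name='demo-secret', SecretString='s3kr1t')

    # A soft delete stamps a deletion date 7-30 days out (default 30)
    # and blocks reads and rotation until the window expires...
    deleted = client.delete_secret(SecretId='demo-secret', RecoveryWindowInDays=7)
    assert deleted['Name'] == 'demo-secret'

    # ...but restore_secret clears the mark and reads work again.
    client.restore_secret(SecretId='demo-secret')
    assert client.get_secret_value(SecretId='demo-secret')['SecretString'] == 's3kr1t'


demo_delete_and_restore()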
@ -4,6 +4,8 @@ from moto.core.responses import BaseResponse

 from .models import secretsmanager_backends

+import json
+

 class SecretsManagerResponse(BaseResponse):

@ -19,9 +21,11 @@ class SecretsManagerResponse(BaseResponse):
     def create_secret(self):
         name = self._get_param('Name')
         secret_string = self._get_param('SecretString')
+        tags = self._get_param('Tags', if_none=[])
         return secretsmanager_backends[self.region].create_secret(
             name=name,
-            secret_string=secret_string
+            secret_string=secret_string,
+            tags=tags
         )

     def get_random_password(self):
@ -62,3 +66,30 @@ class SecretsManagerResponse(BaseResponse):
             rotation_lambda_arn=rotation_lambda_arn,
             rotation_rules=rotation_rules
         )
+
+    def list_secrets(self):
+        max_results = self._get_int_param("MaxResults")
+        next_token = self._get_param("NextToken")
+        secret_list, next_token = secretsmanager_backends[self.region].list_secrets(
+            max_results=max_results,
+            next_token=next_token,
+        )
+        return json.dumps(dict(SecretList=secret_list, NextToken=next_token))
+
+    def delete_secret(self):
+        secret_id = self._get_param("SecretId")
+        recovery_window_in_days = self._get_param("RecoveryWindowInDays")
+        force_delete_without_recovery = self._get_param("ForceDeleteWithoutRecovery")
+        arn, name, deletion_date = secretsmanager_backends[self.region].delete_secret(
+            secret_id=secret_id,
+            recovery_window_in_days=recovery_window_in_days,
+            force_delete_without_recovery=force_delete_without_recovery,
+        )
+        return json.dumps(dict(ARN=arn, Name=name, DeletionDate=deletion_date))
+
+    def restore_secret(self):
+        secret_id = self._get_param("SecretId")
+        arn, name = secretsmanager_backends[self.region].restore_secret(
+            secret_id=secret_id,
+        )
+        return json.dumps(dict(ARN=arn, Name=name))
@ -52,8 +52,9 @@ def random_password(password_length, exclude_characters, exclude_numbers,


 def secret_arn(region, secret_id):
-    return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format(
-        region, secret_id)
+    id_string = ''.join(random.choice(string.ascii_letters) for _ in range(5))
+    return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-{2}".format(
+        region, secret_id, id_string)


 def _exclude_characters(password, exclude_characters):
@ -80,6 +80,9 @@ class DomainDispatcherApplication(object):

         region = 'us-east-1'
         if service == 'dynamodb':
+            if environ['HTTP_X_AMZ_TARGET'].startswith('DynamoDBStreams'):
+                host = 'dynamodbstreams'
+            else:
                 dynamo_api_version = environ['HTTP_X_AMZ_TARGET'].split("_")[1].split(".")[0]
                 # If Newer API version, use dynamodb2
                 if dynamo_api_version > "20111205":
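In server mode the dispatcher cannot tell DynamoDB and DynamoDB Streams apart from the host alone, so it inspects the SDK's `X-Amz-Target` header. A simplified, standalone restatement of the routing rule the hunk implements:

def backend_for_dynamodb_target(amz_target):
    # 'DynamoDBStreams_20120810.ListStreams' -> dynamodbstreams
    # 'DynamoDB_20120810.GetItem'            -> dynamodb2 (newer API)
    # 'DynamoDB_20111205.GetItem'            -> dynamodb  (legacy API)
    if amz_target.startswith('DynamoDBStreams'):
        return 'dynamodbstreams'
    api_version = amz_target.split("_")[1].split(".")[0]
    return 'dynamodb2' if api_version > "20111205" else 'dynamodb'


assert backend_for_dynamodb_target('DynamoDBStreams_20120810.ListStreams') == 'dynamodbstreams'
assert backend_for_dynamodb_target('DynamoDB_20120810.GetItem') == 'dynamodb2'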
@ -534,7 +534,7 @@ class SQSBackend(BaseBackend):
                 break

             import time
-            time.sleep(0.001)
+            time.sleep(0.01)
             continue

         previous_result_count = len(result)
@ -420,7 +420,7 @@ CREATE_QUEUE_RESPONSE = """<CreateQueueResponse>
         <VisibilityTimeout>{{ queue.visibility_timeout }}</VisibilityTimeout>
     </CreateQueueResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </CreateQueueResponse>"""

@ -429,7 +429,7 @@ GET_QUEUE_URL_RESPONSE = """<GetQueueUrlResponse>
         <QueueUrl>{{ queue.url(request_url) }}</QueueUrl>
     </GetQueueUrlResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </GetQueueUrlResponse>"""

@ -440,13 +440,13 @@ LIST_QUEUES_RESPONSE = """<ListQueuesResponse>
         {% endfor %}
     </ListQueuesResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </ListQueuesResponse>"""

 DELETE_QUEUE_RESPONSE = """<DeleteQueueResponse>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </DeleteQueueResponse>"""

@ -460,13 +460,13 @@ GET_QUEUE_ATTRIBUTES_RESPONSE = """<GetQueueAttributesResponse>
         {% endfor %}
     </GetQueueAttributesResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </GetQueueAttributesResponse>"""

 SET_QUEUE_ATTRIBUTE_RESPONSE = """<SetQueueAttributesResponse>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </SetQueueAttributesResponse>"""

@ -483,7 +483,7 @@ SEND_MESSAGE_RESPONSE = """<SendMessageResponse>
         </MessageId>
     </SendMessageResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </SendMessageResponse>"""

@ -543,7 +543,7 @@ RECEIVE_MESSAGE_RESPONSE = """<ReceiveMessageResponse>
         {% endfor %}
     </ReceiveMessageResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </ReceiveMessageResponse>"""

@ -561,13 +561,13 @@ SEND_MESSAGE_BATCH_RESPONSE = """<SendMessageBatchResponse>
         {% endfor %}
     </SendMessageBatchResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </SendMessageBatchResponse>"""

 DELETE_MESSAGE_RESPONSE = """<DeleteMessageResponse>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </DeleteMessageResponse>"""

@ -580,13 +580,13 @@ DELETE_MESSAGE_BATCH_RESPONSE = """<DeleteMessageBatchResponse>
         {% endfor %}
     </DeleteMessageBatchResult>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </DeleteMessageBatchResponse>"""

 CHANGE_MESSAGE_VISIBILITY_RESPONSE = """<ChangeMessageVisibilityResponse>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </ChangeMessageVisibilityResponse>"""

@ -613,7 +613,7 @@ CHANGE_MESSAGE_VISIBILITY_BATCH_RESPONSE = """<ChangeMessageVisibilityBatchRespo

 PURGE_QUEUE_RESPONSE = """<PurgeQueueResponse>
     <ResponseMetadata>
-        <RequestId>{{ requestid }}</RequestId>
+        <RequestId></RequestId>
     </ResponseMetadata>
 </PurgeQueueResponse>"""
@ -14,10 +14,12 @@ import itertools


 class Parameter(BaseModel):
-    def __init__(self, name, value, type, description, keyid, last_modified_date, version):
+    def __init__(self, name, value, type, description, allowed_pattern, keyid,
+                 last_modified_date, version):
         self.name = name
         self.type = type
         self.description = description
+        self.allowed_pattern = allowed_pattern
         self.keyid = keyid
         self.last_modified_date = last_modified_date
         self.version = version
@ -58,6 +60,10 @@ class Parameter(BaseModel):

         if self.keyid:
             r['KeyId'] = self.keyid
+
+        if self.allowed_pattern:
+            r['AllowedPattern'] = self.allowed_pattern
+
         return r


@ -291,7 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend):
             return self._parameters[name]
         return None

-    def put_parameter(self, name, description, value, type, keyid, overwrite):
+    def put_parameter(self, name, description, value, type, allowed_pattern,
+                      keyid, overwrite):
         previous_parameter = self._parameters.get(name)
         version = 1

@ -302,8 +309,8 @@ class SimpleSystemManagerBackend(BaseBackend):
                 return

         last_modified_date = time.time()
-        self._parameters[name] = Parameter(
-            name, value, type, description, keyid, last_modified_date, version)
+        self._parameters[name] = Parameter(name, value, type, description,
+                                           allowed_pattern, keyid, last_modified_date, version)
         return version

     def add_tags_to_resource(self, resource_type, resource_id, tags):
@ -160,11 +160,12 @@ class SimpleSystemManagerResponse(BaseResponse):
         description = self._get_param('Description')
         value = self._get_param('Value')
         type_ = self._get_param('Type')
+        allowed_pattern = self._get_param('AllowedPattern')
         keyid = self._get_param('KeyId')
         overwrite = self._get_param('Overwrite', False)

         result = self.ssm_backend.put_parameter(
-            name, description, value, type_, keyid, overwrite)
+            name, description, value, type_, allowed_pattern, keyid, overwrite)

         if result is None:
             error = {
@ -3,6 +3,7 @@ from .responses import SimpleSystemManagerResponse

 url_bases = [
     "https?://ssm.(.+).amazonaws.com",
+    "https?://ssm.(.+).amazonaws.com.cn",
 ]

 url_paths = {
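With the parameter plumbed through both layers, an AllowedPattern supplied at write time is stored and echoed back when parameters are described. A sketch (parameter name and pattern hypothetical):

import boto3
from moto import mock_ssm


@mock_ssm
def demo_allowed_pattern():
    client = boto3.client('ssm', region_name='us-east-1')
    client.put_parameter(
        Name='demo-param',
        Value='42',
        Type='String',
        AllowedPattern=r'^\d+$',
    )

    # The stored pattern is returned in the parameter's metadata.
    param = client.describe_parameters()['Parameters'][0]
    assert param['AllowedPattern'] == r'^\d+$'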
31 setup.py
@ -1,29 +1,44 @@
 #!/usr/bin/env python
 from __future__ import unicode_literals
+import codecs
+import os
+import re
 import setuptools
 from setuptools import setup, find_packages
 import sys


+# Borrowed from pip at https://github.com/pypa/pip/blob/62c27dee45625e1b63d1e023b0656310f276e050/setup.py#L11-L15
+here = os.path.abspath(os.path.dirname(__file__))
+
+def read(*parts):
+    # intentionally *not* adding an encoding option to open, See:
+    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
+    with codecs.open(os.path.join(here, *parts), 'r') as fp:
+        return fp.read()
+
+
 install_requires = [
-    "Jinja2>=2.7.3",
+    "Jinja2>=2.10.1",
     "boto>=2.36.0",
-    "boto3>=1.6.16",
+    "boto3>=1.9.86",
-    "botocore>=1.12.13",
+    "botocore>=1.12.86",
     "cryptography>=2.3.0",
     "requests>=2.5",
     "xmltodict",
     "six>1.9",
     "werkzeug",
-    "pyaml",
+    "PyYAML",
     "pytz",
     "python-dateutil<3.0.0,>=2.1",
-    "python-jose<3.0.0",
+    "python-jose<4.0.0",
     "mock",
     "docker>=2.5.1",
-    "jsondiff==1.1.1",
+    "jsondiff==1.1.2",
     "aws-xray-sdk!=0.96,>=0.93",
     "responses>=0.9.0",
+    "idna<2.9,>=2.5",
+    "cfn-lint",
 ]

 extras_require = {
@ -40,9 +55,11 @@ else:

 setup(
     name='moto',
-    version='1.3.7',
+    version='1.3.8',
     description='A library that allows your python tests to easily'
                 ' mock out the boto library',
+    long_description=read('README.md'),
+    long_description_content_type='text/markdown',
     author='Steve Pulec',
     author_email='spulec@gmail.com',
     url='https://github.com/spulec/moto',
@ -543,6 +543,7 @@ def test_describe_load_balancers():
     )

     response = client.describe_load_balancers(AutoScalingGroupName='test_asg')
+    assert response['ResponseMetadata']['RequestId']
     list(response['LoadBalancers']).should.have.length_of(1)
     response['LoadBalancers'][0]['LoadBalancerName'].should.equal('my-lb')

@ -710,6 +711,7 @@ def test_create_autoscaling_group_boto3():
             'PropagateAtLaunch': False
         }],
         VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=False,
     )
     response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)

@ -728,13 +730,48 @@ def test_describe_autoscaling_groups_boto3():
         MaxSize=20,
         DesiredCapacity=5,
         VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=True,
     )

     response = client.describe_auto_scaling_groups(
         AutoScalingGroupNames=["test_asg"]
     )
     response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
-    response['AutoScalingGroups'][0][
-        'AutoScalingGroupName'].should.equal('test_asg')
+    group = response['AutoScalingGroups'][0]
+    group['AutoScalingGroupName'].should.equal('test_asg')
+    group['NewInstancesProtectedFromScaleIn'].should.equal(True)
+    group['Instances'][0]['ProtectedFromScaleIn'].should.equal(True)
+
+
+@mock_autoscaling
+def test_describe_autoscaling_instances_boto3():
+    mocked_networking = setup_networking()
+    client = boto3.client('autoscaling', region_name='us-east-1')
+    _ = client.create_launch_configuration(
+        LaunchConfigurationName='test_launch_configuration'
+    )
+    _ = client.create_auto_scaling_group(
+        AutoScalingGroupName='test_asg',
+        LaunchConfigurationName='test_launch_configuration',
+        MinSize=0,
+        MaxSize=20,
+        DesiredCapacity=5,
+        VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=True,
+    )
+
+    response = client.describe_auto_scaling_groups(
+        AutoScalingGroupNames=["test_asg"]
+    )
+    instance_ids = [
+        instance['InstanceId']
+        for instance in response['AutoScalingGroups'][0]['Instances']
+    ]
+
+    response = client.describe_auto_scaling_instances(InstanceIds=instance_ids)
+    for instance in response['AutoScalingInstances']:
+        instance['AutoScalingGroupName'].should.equal('test_asg')
+        instance['ProtectedFromScaleIn'].should.equal(True)


 @mock_autoscaling
@ -751,17 +788,21 @@ def test_update_autoscaling_group_boto3():
         MaxSize=20,
         DesiredCapacity=5,
         VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=True,
     )

-    response = client.update_auto_scaling_group(
+    _ = client.update_auto_scaling_group(
         AutoScalingGroupName='test_asg',
         MinSize=1,
+        NewInstancesProtectedFromScaleIn=False,
     )

     response = client.describe_auto_scaling_groups(
         AutoScalingGroupNames=["test_asg"]
     )
-    response['AutoScalingGroups'][0]['MinSize'].should.equal(1)
+    group = response['AutoScalingGroups'][0]
+    group['MinSize'].should.equal(1)
+    group['NewInstancesProtectedFromScaleIn'].should.equal(False)


 @mock_autoscaling
@ -992,9 +1033,7 @@ def test_attach_one_instance():
             'PropagateAtLaunch': True
         }],
         VPCZoneIdentifier=mocked_networking['subnet1'],
-    )
-    response = client.describe_auto_scaling_groups(
-        AutoScalingGroupNames=['test_asg']
+        NewInstancesProtectedFromScaleIn=True,
     )

     ec2 = boto3.resource('ec2', 'us-east-1')
@ -1009,7 +1048,11 @@ def test_attach_one_instance():
     response = client.describe_auto_scaling_groups(
         AutoScalingGroupNames=['test_asg']
     )
-    response['AutoScalingGroups'][0]['Instances'].should.have.length_of(3)
+    instances = response['AutoScalingGroups'][0]['Instances']
+    instances.should.have.length_of(3)
+    for instance in instances:
+        instance['ProtectedFromScaleIn'].should.equal(True)


 @mock_autoscaling
 @mock_ec2
@ -1100,3 +1143,111 @@ def test_suspend_processes():
             launch_suspended = True

     assert launch_suspended is True
+
+@mock_autoscaling
+def test_set_instance_protection():
+    mocked_networking = setup_networking()
+    client = boto3.client('autoscaling', region_name='us-east-1')
+    _ = client.create_launch_configuration(
+        LaunchConfigurationName='test_launch_configuration'
+    )
+    _ = client.create_auto_scaling_group(
+        AutoScalingGroupName='test_asg',
+        LaunchConfigurationName='test_launch_configuration',
+        MinSize=0,
+        MaxSize=20,
+        DesiredCapacity=5,
+        VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=False,
+    )
+
+    response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg'])
+    instance_ids = [
+        instance['InstanceId']
+        for instance in response['AutoScalingGroups'][0]['Instances']
+    ]
+    protected = instance_ids[:3]
+
+    _ = client.set_instance_protection(
+        AutoScalingGroupName='test_asg',
+        InstanceIds=protected,
+        ProtectedFromScaleIn=True,
+    )
+
+    response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg'])
+    for instance in response['AutoScalingGroups'][0]['Instances']:
+        instance['ProtectedFromScaleIn'].should.equal(
+            instance['InstanceId'] in protected
+        )
+
+
+@mock_autoscaling
+def test_set_desired_capacity_up_boto3():
+    mocked_networking = setup_networking()
+    client = boto3.client('autoscaling', region_name='us-east-1')
+    _ = client.create_launch_configuration(
+        LaunchConfigurationName='test_launch_configuration'
+    )
+    _ = client.create_auto_scaling_group(
+        AutoScalingGroupName='test_asg',
+        LaunchConfigurationName='test_launch_configuration',
+        MinSize=0,
+        MaxSize=20,
+        DesiredCapacity=5,
+        VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=True,
+    )
+
+    _ = client.set_desired_capacity(
+        AutoScalingGroupName='test_asg',
+        DesiredCapacity=10,
+    )
+
+    response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg'])
+    instances = response['AutoScalingGroups'][0]['Instances']
+    instances.should.have.length_of(10)
+    for instance in instances:
+        instance['ProtectedFromScaleIn'].should.equal(True)
+
+
+@mock_autoscaling
+def test_set_desired_capacity_down_boto3():
+    mocked_networking = setup_networking()
+    client = boto3.client('autoscaling', region_name='us-east-1')
+    _ = client.create_launch_configuration(
+        LaunchConfigurationName='test_launch_configuration'
+    )
+    _ = client.create_auto_scaling_group(
+        AutoScalingGroupName='test_asg',
+        LaunchConfigurationName='test_launch_configuration',
+        MinSize=0,
+        MaxSize=20,
+        DesiredCapacity=5,
+        VPCZoneIdentifier=mocked_networking['subnet1'],
+        NewInstancesProtectedFromScaleIn=True,
+    )
+
+    response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg'])
+    instance_ids = [
+        instance['InstanceId']
+        for instance in response['AutoScalingGroups'][0]['Instances']
+    ]
+    unprotected, protected = instance_ids[:2], instance_ids[2:]
+
+    _ = client.set_instance_protection(
+        AutoScalingGroupName='test_asg',
+        InstanceIds=unprotected,
+        ProtectedFromScaleIn=False,
+    )
+
+    _ = client.set_desired_capacity(
+        AutoScalingGroupName='test_asg',
+        DesiredCapacity=1,
+    )
+
+    response = client.describe_auto_scaling_groups(AutoScalingGroupNames=['test_asg'])
+    group = response['AutoScalingGroups'][0]
+    group['DesiredCapacity'].should.equal(1)
+    instance_ids = {instance['InstanceId'] for instance in group['Instances']}
+    set(protected).should.equal(instance_ids)
+    set(unprotected).should_not.be.within(instance_ids)  # only unprotected killed
@ -12,6 +12,8 @@ import sure  # noqa

 from freezegun import freeze_time
 from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings
+from nose.tools import assert_raises
+from botocore.exceptions import ClientError

 _lambda_region = 'us-west-2'

@ -397,6 +399,11 @@ def test_get_function():
     result = conn.get_function(FunctionName='testFunction', Qualifier='$LATEST')
     result['Configuration']['Version'].should.equal('$LATEST')

+    # Test get function when can't find function name
+    with assert_raises(ClientError):
+        conn.get_function(FunctionName='junk', Qualifier='$LATEST')
+

 @mock_lambda
 @mock_s3
@ -464,7 +471,8 @@ def test_publish():
     function_list['Functions'].should.have.length_of(1)
     latest_arn = function_list['Functions'][0]['FunctionArn']

-    conn.publish_version(FunctionName='testFunction')
+    res = conn.publish_version(FunctionName='testFunction')
+    assert res['ResponseMetadata']['HTTPStatusCode'] == 201

     function_list = conn.list_functions()
     function_list['Functions'].should.have.length_of(2)
@ -819,3 +827,87 @@ def get_function_policy():
     assert isinstance(response['Policy'], str)
     res = json.loads(response['Policy'])
     assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction'
+
+
+@mock_lambda
+@mock_s3
+def test_list_versions_by_function():
+    s3_conn = boto3.client('s3', 'us-west-2')
+    s3_conn.create_bucket(Bucket='test-bucket')
+
+    zip_content = get_test_zip_file2()
+    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
+    conn = boto3.client('lambda', 'us-west-2')
+
+    conn.create_function(
+        FunctionName='testFunction',
+        Runtime='python2.7',
+        Role='test-iam-role',
+        Handler='lambda_function.lambda_handler',
+        Code={
+            'S3Bucket': 'test-bucket',
+            'S3Key': 'test.zip',
+        },
+        Description='test lambda function',
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    res = conn.publish_version(FunctionName='testFunction')
+    assert res['ResponseMetadata']['HTTPStatusCode'] == 201
+    versions = conn.list_versions_by_function(FunctionName='testFunction')
+
+    assert versions['Versions'][0]['FunctionArn'] == 'arn:aws:lambda:us-west-2:123456789012:function:testFunction:$LATEST'
+
+
+@mock_lambda
+@mock_s3
+def test_create_function_with_already_exists():
+    s3_conn = boto3.client('s3', 'us-west-2')
+    s3_conn.create_bucket(Bucket='test-bucket')
+
+    zip_content = get_test_zip_file2()
+    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
+    conn = boto3.client('lambda', 'us-west-2')
+
+    conn.create_function(
+        FunctionName='testFunction',
+        Runtime='python2.7',
+        Role='test-iam-role',
+        Handler='lambda_function.lambda_handler',
+        Code={
+            'S3Bucket': 'test-bucket',
+            'S3Key': 'test.zip',
+        },
+        Description='test lambda function',
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    response = conn.create_function(
+        FunctionName='testFunction',
+        Runtime='python2.7',
+        Role='test-iam-role',
+        Handler='lambda_function.lambda_handler',
+        Code={
+            'S3Bucket': 'test-bucket',
+            'S3Key': 'test.zip',
+        },
+        Description='test lambda function',
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    assert response['FunctionName'] == 'testFunction'
+
+
+@mock_lambda
+@mock_s3
+def test_list_versions_by_function_for_nonexistent_function():
+    conn = boto3.client('lambda', 'us-west-2')
+    versions = conn.list_versions_by_function(FunctionName='testFunction')
+
+    assert len(versions['Versions']) == 0
@ -323,6 +323,54 @@ def test_create_job_queue():
     resp.should.contain('jobQueues')
     len(resp['jobQueues']).should.equal(0)

+    # Create job queue which already exists
+    try:
+        resp = batch_client.create_job_queue(
+            jobQueueName='test_job_queue',
+            state='ENABLED',
+            priority=123,
+            computeEnvironmentOrder=[
+                {
+                    'order': 123,
+                    'computeEnvironment': arn
+                },
+            ]
+        )
+
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ClientException')
+
+    # Create job queue with incorrect state
+    try:
+        resp = batch_client.create_job_queue(
+            jobQueueName='test_job_queue2',
+            state='JUNK',
+            priority=123,
+            computeEnvironmentOrder=[
+                {
+                    'order': 123,
+                    'computeEnvironment': arn
+                },
+            ]
+        )
+
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ClientException')
+
+    # Create job queue with no compute env
+    try:
+        resp = batch_client.create_job_queue(
+            jobQueueName='test_job_queue3',
+            state='JUNK',
+            priority=123,
+            computeEnvironmentOrder=[
+
+            ]
+        )
+
+    except ClientError as err:
+        err.response['Error']['Code'].should.equal('ClientException')

 @mock_ec2
 @mock_ecs
@ -397,6 +445,17 @@ def test_update_job_queue():
     len(resp['jobQueues']).should.equal(1)
     resp['jobQueues'][0]['priority'].should.equal(5)

+    batch_client.update_job_queue(
+        jobQueue='test_job_queue',
+        priority=5
+    )
+
+    resp = batch_client.describe_job_queues()
+    resp.should.contain('jobQueues')
+    len(resp['jobQueues']).should.equal(1)
+    resp['jobQueues'][0]['priority'].should.equal(5)
+
+
 @mock_ec2
 @mock_ecs
@@ -29,6 +29,10 @@ template = {
        "NinjaENI": {
            "Description": "Elastic IP mapping to Auto-Scaling Group",
            "Value": {"Ref": "ENI"}
        },
        "ENIIpAddress": {
            "Description": "ENI's Private IP address",
            "Value": {"Fn::GetAtt": ["ENI", "PrimaryPrivateIpAddress"]}
        }
    }
}
@@ -266,9 +266,9 @@ def test_delete_stack_by_name():
         template_body=dummy_template_json,
     )

-    conn.list_stacks().should.have.length_of(1)
+    conn.describe_stacks().should.have.length_of(1)
     conn.delete_stack("test_stack")
-    conn.list_stacks().should.have.length_of(0)
+    conn.describe_stacks().should.have.length_of(0)


 @mock_cloudformation_deprecated
@@ -279,9 +279,9 @@ def test_delete_stack_by_id():
         template_body=dummy_template_json,
     )

-    conn.list_stacks().should.have.length_of(1)
+    conn.describe_stacks().should.have.length_of(1)
     conn.delete_stack(stack_id)
-    conn.list_stacks().should.have.length_of(0)
+    conn.describe_stacks().should.have.length_of(0)
     with assert_raises(BotoServerError):
         conn.describe_stacks("test_stack")
@@ -296,9 +296,9 @@ def test_delete_stack_with_resource_missing_delete_attr():
         template_body=dummy_template_json3,
     )

-    conn.list_stacks().should.have.length_of(1)
+    conn.describe_stacks().should.have.length_of(1)
     conn.delete_stack("test_stack")
-    conn.list_stacks().should.have.length_of(0)
+    conn.describe_stacks().should.have.length_of(0)


 @mock_cloudformation_deprecated
@@ -184,6 +184,423 @@ dummy_import_template_json = json.dumps(dummy_import_template)
dummy_redrive_template_json = json.dumps(dummy_redrive_template)


@mock_cloudformation
def test_boto3_describe_stack_instances():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
    )
    usw2_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-west-2',
    )
    use1_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-east-1',
    )

    usw2_instance['StackInstance'].should.have.key('Region').which.should.equal('us-west-2')
    usw2_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012')
    use1_instance['StackInstance'].should.have.key('Region').which.should.equal('us-east-1')
    use1_instance['StackInstance'].should.have.key('Account').which.should.equal('123456789012')


@mock_cloudformation
def test_boto3_list_stacksets_length():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_set(
        StackSetName="test_stack_set2",
        TemplateBody=dummy_template_yaml,
    )
    stacksets = cf_conn.list_stack_sets()
    stacksets.should.have.length_of(2)


@mock_cloudformation
def test_boto3_list_stacksets_contents():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    stacksets = cf_conn.list_stack_sets()
    stacksets['Summaries'][0].should.have.key('StackSetName').which.should.equal('test_stack_set')
    stacksets['Summaries'][0].should.have.key('Status').which.should.equal('ACTIVE')


@mock_cloudformation
def test_boto3_stop_stack_set_operation():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-1', 'us-west-2'],
    )
    operation_id = cf_conn.list_stack_set_operations(
        StackSetName="test_stack_set")['Summaries'][-1]['OperationId']
    cf_conn.stop_stack_set_operation(
        StackSetName="test_stack_set",
        OperationId=operation_id
    )
    list_operation = cf_conn.list_stack_set_operations(
        StackSetName="test_stack_set"
    )
    list_operation['Summaries'][-1]['Status'].should.equal('STOPPED')


@mock_cloudformation
def test_boto3_describe_stack_set_operation():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-1', 'us-west-2'],
    )
    operation_id = cf_conn.list_stack_set_operations(
        StackSetName="test_stack_set")['Summaries'][-1]['OperationId']
    cf_conn.stop_stack_set_operation(
        StackSetName="test_stack_set",
        OperationId=operation_id
    )
    response = cf_conn.describe_stack_set_operation(
        StackSetName="test_stack_set",
        OperationId=operation_id,
    )

    response['StackSetOperation']['Status'].should.equal('STOPPED')
    response['StackSetOperation']['Action'].should.equal('CREATE')


@mock_cloudformation
def test_boto3_list_stack_set_operation_results():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-1', 'us-west-2'],
    )
    operation_id = cf_conn.list_stack_set_operations(
        StackSetName="test_stack_set")['Summaries'][-1]['OperationId']

    cf_conn.stop_stack_set_operation(
        StackSetName="test_stack_set",
        OperationId=operation_id
    )
    response = cf_conn.list_stack_set_operation_results(
        StackSetName="test_stack_set",
        OperationId=operation_id,
    )

    response['Summaries'].should.have.length_of(3)
    response['Summaries'][0].should.have.key('Account').which.should.equal('123456789012')
    response['Summaries'][1].should.have.key('Status').which.should.equal('STOPPED')


@mock_cloudformation
def test_boto3_update_stack_instances():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    param = [
        {'ParameterKey': 'SomeParam', 'ParameterValue': 'StackSetValue'},
        {'ParameterKey': 'AnotherParam', 'ParameterValue': 'StackSetValue2'},
    ]
    param_overrides = [
        {'ParameterKey': 'SomeParam', 'ParameterValue': 'OverrideValue'},
        {'ParameterKey': 'AnotherParam', 'ParameterValue': 'OverrideValue2'}
    ]
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=param,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-1', 'us-west-2'],
    )
    cf_conn.update_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-west-1', 'us-west-2'],
        ParameterOverrides=param_overrides,
    )
    usw2_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-west-2',
    )
    usw1_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-west-1',
    )
    use1_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-east-1',
    )

    usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey'])
    usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue'])
    usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey'])
    usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue'])

    usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey'])
    usw1_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue'])
    usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey'])
    usw1_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue'])

    use1_instance['StackInstance']['ParameterOverrides'].should.be.empty


@mock_cloudformation
def test_boto3_delete_stack_instances():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
    )

    cf_conn.delete_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1'],
        RetainStacks=False,
    )

    cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(1)
    cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Region'].should.equal(
        'us-west-2')


@mock_cloudformation
def test_boto3_create_stack_instances():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
    )

    cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'].should.have.length_of(2)
    cf_conn.list_stack_instances(StackSetName="test_stack_set")['Summaries'][0]['Account'].should.equal(
        '123456789012')


@mock_cloudformation
def test_boto3_create_stack_instances_with_param_overrides():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    param = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'},
    ]
    param_overrides = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'}
    ]
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=param,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
        ParameterOverrides=param_overrides,
    )
    usw2_instance = cf_conn.describe_stack_instance(
        StackSetName="test_stack_set",
        StackInstanceAccount='123456789012',
        StackInstanceRegion='us-west-2',
    )

    usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey'])
    usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey'])
    usw2_instance['StackInstance']['ParameterOverrides'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue'])
    usw2_instance['StackInstance']['ParameterOverrides'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue'])


@mock_cloudformation
def test_update_stack_set():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    param = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'StackSetValue'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'StackSetValue2'},
    ]
    param_overrides = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'OverrideValue'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'OverrideValue2'}
    ]
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=param,
    )
    cf_conn.update_stack_set(
        StackSetName='test_stack_set',
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=param_overrides,
    )
    stackset = cf_conn.describe_stack_set(StackSetName='test_stack_set')

    stackset['StackSet']['Parameters'][0]['ParameterValue'].should.equal(param_overrides[0]['ParameterValue'])
    stackset['StackSet']['Parameters'][1]['ParameterValue'].should.equal(param_overrides[1]['ParameterValue'])
    stackset['StackSet']['Parameters'][0]['ParameterKey'].should.equal(param_overrides[0]['ParameterKey'])
    stackset['StackSet']['Parameters'][1]['ParameterKey'].should.equal(param_overrides[1]['ParameterKey'])


@mock_cloudformation
def test_boto3_list_stack_set_operations():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.create_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
    )
    cf_conn.update_stack_instances(
        StackSetName="test_stack_set",
        Accounts=['123456789012'],
        Regions=['us-east-1', 'us-west-2'],
    )

    list_operation = cf_conn.list_stack_set_operations(StackSetName="test_stack_set")
    list_operation['Summaries'].should.have.length_of(2)
    list_operation['Summaries'][-1]['Action'].should.equal('UPDATE')


@mock_cloudformation
def test_boto3_delete_stack_set():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )
    cf_conn.delete_stack_set(StackSetName='test_stack_set')

    cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['Status'].should.equal(
        'DELETED')


@mock_cloudformation
def test_boto3_create_stack_set():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_json,
    )

    cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal(
        dummy_template_json)


@mock_cloudformation
def test_boto3_create_stack_set_with_yaml():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_stack_set(
        StackSetName="test_stack_set",
        TemplateBody=dummy_template_yaml,
    )

    cf_conn.describe_stack_set(StackSetName="test_stack_set")['StackSet']['TemplateBody'].should.equal(
        dummy_template_yaml)


@mock_cloudformation
@mock_s3
def test_create_stack_set_from_s3_url():
    s3 = boto3.client('s3')
    s3_conn = boto3.resource('s3')
    bucket = s3_conn.create_bucket(Bucket="foobar")

    key = s3_conn.Object(
        'foobar', 'template-key').put(Body=dummy_template_json)
    key_url = s3.generate_presigned_url(
        ClientMethod='get_object',
        Params={
            'Bucket': 'foobar',
            'Key': 'template-key'
        }
    )

    cf_conn = boto3.client('cloudformation', region_name='us-west-1')
    cf_conn.create_stack_set(
        StackSetName='stack_from_url',
        TemplateURL=key_url,
    )
    cf_conn.describe_stack_set(StackSetName="stack_from_url")['StackSet']['TemplateBody'].should.equal(
        dummy_template_json)


@mock_cloudformation
def test_boto3_create_stack_set_with_ref_yaml():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    params = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'},
    ]
    cf_conn.create_stack_set(
        StackSetName="test_stack",
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=params
    )

    cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['TemplateBody'].should.equal(
        dummy_template_yaml_with_ref)


@mock_cloudformation
def test_boto3_describe_stack_set_params():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    params = [
        {'ParameterKey': 'TagDescription', 'ParameterValue': 'desc_ref'},
        {'ParameterKey': 'TagName', 'ParameterValue': 'name_ref'},
    ]
    cf_conn.create_stack_set(
        StackSetName="test_stack",
        TemplateBody=dummy_template_yaml_with_ref,
        Parameters=params
    )

    cf_conn.describe_stack_set(StackSetName="test_stack")['StackSet']['Parameters'].should.equal(
        params)


@mock_cloudformation
def test_boto3_create_stack():
@@ -391,11 +808,40 @@ def test_create_change_set_from_s3_url():
        TemplateURL=key_url,
        ChangeSetName='NewChangeSet',
        ChangeSetType='CREATE',
        Tags=[
            {'Key': 'tag-key', 'Value': 'tag-value'}
        ],
    )
    assert 'arn:aws:cloudformation:us-west-1:123456789:changeSet/NewChangeSet/' in response['Id']
    assert 'arn:aws:cloudformation:us-east-1:123456789:stack/NewStack' in response['StackId']


@mock_cloudformation
def test_describe_change_set():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_change_set(
        StackName='NewStack',
        TemplateBody=dummy_template_json,
        ChangeSetName='NewChangeSet',
        ChangeSetType='CREATE',
    )

    stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet")
    stack['ChangeSetName'].should.equal('NewChangeSet')
    stack['StackName'].should.equal('NewStack')

    cf_conn.create_change_set(
        StackName='NewStack',
        TemplateBody=dummy_update_template_json,
        ChangeSetName='NewChangeSet2',
        ChangeSetType='UPDATE',
    )
    stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet2")
    stack['ChangeSetName'].should.equal('NewChangeSet2')
    stack['StackName'].should.equal('NewStack')
    stack['Changes'].should.have.length_of(2)


@mock_cloudformation
def test_execute_change_set_w_arn():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
@@ -417,7 +863,7 @@ def test_execute_change_set_w_name():
         ChangeSetName='NewChangeSet',
         ChangeSetType='CREATE',
     )
-    cf_conn.execute_change_set(ChangeSetName='NewStack', StackName='NewStack')
+    cf_conn.execute_change_set(ChangeSetName='NewChangeSet', StackName='NewStack')


 @mock_cloudformation
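The one-character fix above is worth spelling out: ExecuteChangeSet accepts either a change set ARN on its own, or a change set name, which is only unique per stack and therefore needs StackName as well. A minimal sketch of the two call shapes (change_set_arn is illustrative):

import boto3

cf_conn = boto3.client('cloudformation', region_name='us-east-1')

# By name: names are only unique within a stack, so StackName is required too.
cf_conn.execute_change_set(ChangeSetName='NewChangeSet', StackName='NewStack')

# By ARN: the ARN alone identifies the change set.
# cf_conn.execute_change_set(ChangeSetName=change_set_arn)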
@@ -486,6 +932,20 @@ def test_describe_stack_by_stack_id():
    stack_by_id['StackName'].should.equal("test_stack")


@mock_cloudformation
def test_list_change_sets():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_change_set(
        StackName='NewStack2',
        TemplateBody=dummy_template_json,
        ChangeSetName='NewChangeSet2',
        ChangeSetType='CREATE',
    )
    change_set = cf_conn.list_change_sets(StackName='NewStack2')['Summaries'][0]
    change_set['StackName'].should.equal('NewStack2')
    change_set['ChangeSetName'].should.equal('NewChangeSet2')


@mock_cloudformation
def test_list_stacks():
    cf = boto3.resource('cloudformation', region_name='us-east-1')
@@ -518,6 +978,22 @@ def test_delete_stack_from_resource():
    list(cf.stacks.all()).should.have.length_of(0)


@mock_cloudformation
@mock_ec2
def test_delete_change_set():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    cf_conn.create_change_set(
        StackName='NewStack',
        TemplateBody=dummy_template_json,
        ChangeSetName='NewChangeSet',
        ChangeSetType='CREATE',
    )

    cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(1)
    cf_conn.delete_change_set(ChangeSetName='NewChangeSet', StackName='NewStack')
    cf_conn.list_change_sets(StackName='NewStack')['Summaries'].should.have.length_of(0)


@mock_cloudformation
@mock_ec2
def test_delete_stack_by_name():
@@ -532,6 +1008,21 @@ def test_delete_stack_by_name():
    cf_conn.describe_stacks()['Stacks'].should.have.length_of(0)


@mock_cloudformation
def test_delete_stack():
    cf = boto3.client('cloudformation', region_name='us-east-1')
    cf.create_stack(
        StackName="test_stack",
        TemplateBody=dummy_template_json,
    )

    cf.delete_stack(
        StackName="test_stack",
    )
    stacks = cf.list_stacks()
    assert stacks['StackSummaries'][0]['StackStatus'] == 'DELETE_COMPLETE'


@mock_cloudformation
def test_describe_deleted_stack():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
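The DELETE_COMPLETE assertion above works because ListStacks keeps deleted stacks in the returned history. When only live stacks are wanted, the call takes a status filter; a minimal sketch using the standard StackStatusFilter parameter:

import boto3

cf = boto3.client('cloudformation', region_name='us-east-1')

# list_stacks also returns DELETE_COMPLETE entries; filtering by status
# keeps only stacks that are currently up.
live = cf.list_stacks(StackStatusFilter=['CREATE_COMPLETE', 'UPDATE_COMPLETE'])
for summary in live['StackSummaries']:
    print(summary['StackName'], summary['StackStatus'])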
@@ -1952,7 +1952,12 @@ def lambda_handler(event, context):
                 "Description": "Test function",
                 "MemorySize": 128,
                 "Role": "test-role",
-                "Runtime": "python2.7"
+                "Runtime": "python2.7",
+                "Environment": {
+                    "Variables": {
+                        "TEST_ENV_KEY": "test-env-val",
+                    }
+                },
             }
         }
     }
@@ -1973,6 +1978,9 @@ def lambda_handler(event, context):
    result['Functions'][0]['MemorySize'].should.equal(128)
    result['Functions'][0]['Role'].should.equal('test-role')
    result['Functions'][0]['Runtime'].should.equal('python2.7')
    result['Functions'][0]['Environment'].should.equal({
        "Variables": {"TEST_ENV_KEY": "test-env-val"}
    })


@mock_cloudformation
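For the Environment block threaded through the template and assertions above: at runtime those variables surface as ordinary process environment inside the function. A minimal handler sketch:

import os

def lambda_handler(event, context):
    # CloudFormation's Environment.Variables become process environment
    # variables in the running Lambda function.
    return os.environ.get('TEST_ENV_KEY')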
@@ -448,8 +448,8 @@ def test_short_form_func_in_yaml_teamplate():
       KeySplit: !Split [A, B]
       KeySub: !Sub A
     """
-    yaml.add_multi_constructor('', yaml_tag_constructor)
+    yaml.add_multi_constructor('', yaml_tag_constructor, Loader=yaml.Loader)
-    template_dict = yaml.load(template)
+    template_dict = yaml.load(template, Loader=yaml.Loader)
     key_and_expects = [
         ['KeyRef', {'Ref': 'foo'}],
         ['KeyB64', {'Fn::Base64': 'valueToEncode'}],
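For context on the two Loader changes in this hunk: newer PyYAML resolves constructors per Loader class, so a constructor must be registered against the same Loader that is later passed to yaml.load(). A minimal standalone sketch (the tag handling below is illustrative, not moto's actual yaml_tag_constructor):

import yaml

def tag_constructor(loader, tag_suffix, node):
    # With prefix '' the multi-constructor sees the full tag, e.g. '!Ref'
    # for the scalar "!Ref foo"; map it to CloudFormation's long form.
    return {tag_suffix.lstrip('!'): loader.construct_scalar(node)}

# Register against an explicit Loader and load with that same Loader.
yaml.add_multi_constructor('', tag_constructor, Loader=yaml.Loader)
template_dict = yaml.load('Key: !Ref foo', Loader=yaml.Loader)
assert template_dict == {'Key': {'Ref': 'foo'}}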
115
tests/test_cloudformation/test_validate.py
Normal file
@@ -0,0 +1,115 @@
from collections import OrderedDict
import json
import yaml
import os
import boto3
from nose.tools import raises
import botocore


from moto.cloudformation.exceptions import ValidationError
from moto.cloudformation.models import FakeStack
from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export
from moto.sqs.models import Queue
from moto.s3.models import FakeBucket
from moto.cloudformation.utils import yaml_tag_constructor
from boto.cloudformation.stack import Output
from moto import mock_cloudformation, mock_s3, mock_sqs, mock_ec2

json_template = {
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "Stack 1",
    "Resources": {
        "EC2Instance1": {
            "Type": "AWS::EC2::Instance",
            "Properties": {
                "ImageId": "ami-d3adb33f",
                "KeyName": "dummy",
                "InstanceType": "t2.micro",
                "Tags": [
                    {
                        "Key": "Description",
                        "Value": "Test tag"
                    },
                    {
                        "Key": "Name",
                        "Value": "Name tag for tests"
                    }
                ]
            }
        }
    }
}

# One resource is required
json_bad_template = {
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "Stack 1"
}

dummy_template_json = json.dumps(json_template)
dummy_bad_template_json = json.dumps(json_bad_template)


@mock_cloudformation
def test_boto3_json_validate_successful():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    response = cf_conn.validate_template(
        TemplateBody=dummy_template_json,
    )
    assert response['Description'] == "Stack 1"
    assert response['Parameters'] == []
    assert response['ResponseMetadata']['HTTPStatusCode'] == 200


@mock_cloudformation
def test_boto3_json_invalid_missing_resource():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    try:
        cf_conn.validate_template(
            TemplateBody=dummy_bad_template_json,
        )
        assert False
    except botocore.exceptions.ClientError as e:
        assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
                         ' with id Missing top level item Resources to file module does not exist'
        assert True


yaml_template = """
    AWSTemplateFormatVersion: '2010-09-09'
    Description: Simple CloudFormation Test Template
    Resources:
      S3Bucket:
        Type: AWS::S3::Bucket
        Properties:
          AccessControl: PublicRead
          BucketName: cf-test-bucket-1
"""

yaml_bad_template = """
    AWSTemplateFormatVersion: '2010-09-09'
    Description: Simple CloudFormation Test Template
"""


@mock_cloudformation
def test_boto3_yaml_validate_successful():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    response = cf_conn.validate_template(
        TemplateBody=yaml_template,
    )
    assert response['Description'] == "Simple CloudFormation Test Template"
    assert response['Parameters'] == []
    assert response['ResponseMetadata']['HTTPStatusCode'] == 200


@mock_cloudformation
def test_boto3_yaml_invalid_missing_resource():
    cf_conn = boto3.client('cloudformation', region_name='us-east-1')
    try:
        cf_conn.validate_template(
            TemplateBody=yaml_bad_template,
        )
        assert False
    except botocore.exceptions.ClientError as e:
        assert str(e) == 'An error occurred (ValidationError) when calling the ValidateTemplate operation: Stack' \
                         ' with id Missing top level item Resources to file module does not exist'
        assert True
@@ -1,14 +1,18 @@
 from __future__ import unicode_literals

-import boto3
 import json
 import os
+import random
 import uuid

+import boto3
+# noinspection PyUnresolvedReferences
+import sure  # noqa
+from botocore.exceptions import ClientError
 from jose import jws
+from nose.tools import assert_raises

 from moto import mock_cognitoidp
-import sure  # noqa


 @mock_cognitoidp
@@ -41,6 +45,56 @@ def test_list_user_pools():
    result["UserPools"][0]["Name"].should.equal(name)


@mock_cognitoidp
def test_list_user_pools_returns_max_items():
    conn = boto3.client("cognito-idp", "us-west-2")

    # Given 10 user pools
    pool_count = 10
    for i in range(pool_count):
        conn.create_user_pool(PoolName=str(uuid.uuid4()))

    max_results = 5
    result = conn.list_user_pools(MaxResults=max_results)
    result["UserPools"].should.have.length_of(max_results)
    result.should.have.key("NextToken")


@mock_cognitoidp
def test_list_user_pools_returns_next_tokens():
    conn = boto3.client("cognito-idp", "us-west-2")

    # Given 10 user pools
    pool_count = 10
    for i in range(pool_count):
        conn.create_user_pool(PoolName=str(uuid.uuid4()))

    max_results = 5
    result = conn.list_user_pools(MaxResults=max_results)
    result["UserPools"].should.have.length_of(max_results)
    result.should.have.key("NextToken")

    next_token = result["NextToken"]
    result_2 = conn.list_user_pools(MaxResults=max_results, NextToken=next_token)
    result_2["UserPools"].should.have.length_of(max_results)
    result_2.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_list_user_pools_when_max_items_more_than_total_items():
    conn = boto3.client("cognito-idp", "us-west-2")

    # Given 10 user pools
    pool_count = 10
    for i in range(pool_count):
        conn.create_user_pool(PoolName=str(uuid.uuid4()))

    max_results = pool_count + 5
    result = conn.list_user_pools(MaxResults=max_results)
    result["UserPools"].should.have.length_of(pool_count)
    result.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_describe_user_pool():
    conn = boto3.client("cognito-idp", "us-west-2")
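The three tests above pin down the pagination contract for list_user_pools: NextToken is present while more pages remain and absent on the last page. A minimal consumer that walks the API to exhaustion under that contract:

import boto3

conn = boto3.client("cognito-idp", "us-west-2")

pools = []
kwargs = {"MaxResults": 5}
while True:
    page = conn.list_user_pools(**kwargs)
    pools.extend(page["UserPools"])
    if "NextToken" not in page:
        break  # last page: no NextToken
    kwargs["NextToken"] = page["NextToken"]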
@@ -140,6 +194,67 @@ def test_list_user_pool_clients():
    result["UserPoolClients"][0]["ClientName"].should.equal(client_name)


@mock_cognitoidp
def test_list_user_pool_clients_returns_max_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 user pool clients
    client_count = 10
    for i in range(client_count):
        client_name = str(uuid.uuid4())
        conn.create_user_pool_client(UserPoolId=user_pool_id,
                                     ClientName=client_name)
    max_results = 5
    result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
                                         MaxResults=max_results)
    result["UserPoolClients"].should.have.length_of(max_results)
    result.should.have.key("NextToken")


@mock_cognitoidp
def test_list_user_pool_clients_returns_next_tokens():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 user pool clients
    client_count = 10
    for i in range(client_count):
        client_name = str(uuid.uuid4())
        conn.create_user_pool_client(UserPoolId=user_pool_id,
                                     ClientName=client_name)
    max_results = 5
    result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
                                         MaxResults=max_results)
    result["UserPoolClients"].should.have.length_of(max_results)
    result.should.have.key("NextToken")

    next_token = result["NextToken"]
    result_2 = conn.list_user_pool_clients(UserPoolId=user_pool_id,
                                           MaxResults=max_results,
                                           NextToken=next_token)
    result_2["UserPoolClients"].should.have.length_of(max_results)
    result_2.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_list_user_pool_clients_when_max_items_more_than_total_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 user pool clients
    client_count = 10
    for i in range(client_count):
        client_name = str(uuid.uuid4())
        conn.create_user_pool_client(UserPoolId=user_pool_id,
                                     ClientName=client_name)
    max_results = client_count + 5
    result = conn.list_user_pool_clients(UserPoolId=user_pool_id,
                                         MaxResults=max_results)
    result["UserPoolClients"].should.have.length_of(client_count)
    result.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_describe_user_pool_client():
    conn = boto3.client("cognito-idp", "us-west-2")
@@ -264,6 +379,83 @@ def test_list_identity_providers():
    result["Providers"][0]["ProviderType"].should.equal(provider_type)


@mock_cognitoidp
def test_list_identity_providers_returns_max_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 identity providers linked to a user pool
    identity_provider_count = 10
    for i in range(identity_provider_count):
        provider_name = str(uuid.uuid4())
        provider_type = "Facebook"
        conn.create_identity_provider(
            UserPoolId=user_pool_id,
            ProviderName=provider_name,
            ProviderType=provider_type,
            ProviderDetails={},
        )

    max_results = 5
    result = conn.list_identity_providers(UserPoolId=user_pool_id,
                                          MaxResults=max_results)
    result["Providers"].should.have.length_of(max_results)
    result.should.have.key("NextToken")


@mock_cognitoidp
def test_list_identity_providers_returns_next_tokens():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 identity providers linked to a user pool
    identity_provider_count = 10
    for i in range(identity_provider_count):
        provider_name = str(uuid.uuid4())
        provider_type = "Facebook"
        conn.create_identity_provider(
            UserPoolId=user_pool_id,
            ProviderName=provider_name,
            ProviderType=provider_type,
            ProviderDetails={},
        )

    max_results = 5
    result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
    result["Providers"].should.have.length_of(max_results)
    result.should.have.key("NextToken")

    next_token = result["NextToken"]
    result_2 = conn.list_identity_providers(UserPoolId=user_pool_id,
                                            MaxResults=max_results,
                                            NextToken=next_token)
    result_2["Providers"].should.have.length_of(max_results)
    result_2.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_list_identity_providers_when_max_items_more_than_total_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 identity providers linked to a user pool
    identity_provider_count = 10
    for i in range(identity_provider_count):
        provider_name = str(uuid.uuid4())
        provider_type = "Facebook"
        conn.create_identity_provider(
            UserPoolId=user_pool_id,
            ProviderName=provider_name,
            ProviderType=provider_type,
            ProviderDetails={},
        )

    max_results = identity_provider_count + 5
    result = conn.list_identity_providers(UserPoolId=user_pool_id, MaxResults=max_results)
    result["Providers"].should.have.length_of(identity_provider_count)
    result.shouldnt.have.key("NextToken")


@mock_cognitoidp
def test_describe_identity_providers():
    conn = boto3.client("cognito-idp", "us-west-2")
@@ -292,6 +484,82 @@ def test_describe_identity_providers():
    result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(value)


@mock_cognitoidp
def test_update_identity_provider():
    conn = boto3.client("cognito-idp", "us-west-2")

    provider_name = str(uuid.uuid4())
    provider_type = "Facebook"
    value = str(uuid.uuid4())
    new_value = str(uuid.uuid4())
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
    conn.create_identity_provider(
        UserPoolId=user_pool_id,
        ProviderName=provider_name,
        ProviderType=provider_type,
        ProviderDetails={
            "thing": value
        },
    )

    result = conn.update_identity_provider(
        UserPoolId=user_pool_id,
        ProviderName=provider_name,
        ProviderDetails={
            "thing": new_value
        },
    )

    result["IdentityProvider"]["UserPoolId"].should.equal(user_pool_id)
    result["IdentityProvider"]["ProviderName"].should.equal(provider_name)
    result["IdentityProvider"]["ProviderType"].should.equal(provider_type)
    result["IdentityProvider"]["ProviderDetails"]["thing"].should.equal(new_value)


@mock_cognitoidp
def test_update_identity_provider_no_user_pool():
    conn = boto3.client("cognito-idp", "us-west-2")

    new_value = str(uuid.uuid4())

    with assert_raises(conn.exceptions.ResourceNotFoundException) as cm:
        conn.update_identity_provider(
            UserPoolId="foo",
            ProviderName="bar",
            ProviderDetails={
                "thing": new_value
            },
        )

    cm.exception.operation_name.should.equal('UpdateIdentityProvider')
    cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
    cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)


@mock_cognitoidp
def test_update_identity_provider_no_identity_provider():
    conn = boto3.client("cognito-idp", "us-west-2")

    provider_name = str(uuid.uuid4())
    provider_type = "Facebook"
    value = str(uuid.uuid4())
    new_value = str(uuid.uuid4())
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    with assert_raises(conn.exceptions.ResourceNotFoundException) as cm:
        conn.update_identity_provider(
            UserPoolId=user_pool_id,
            ProviderName="foo",
            ProviderDetails={
                "thing": new_value
            },
        )

    cm.exception.operation_name.should.equal('UpdateIdentityProvider')
    cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
    cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)


@mock_cognitoidp
def test_delete_identity_providers():
    conn = boto3.client("cognito-idp", "us-west-2")
@ -323,6 +591,245 @@ def test_delete_identity_providers():
|
|||||||
caught.should.be.true
|
caught.should.be.true
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_create_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
description = str(uuid.uuid4())
|
||||||
|
role_arn = "arn:aws:iam:::role/my-iam-role"
|
||||||
|
precedence = random.randint(0, 100000)
|
||||||
|
|
||||||
|
result = conn.create_group(
|
||||||
|
GroupName=group_name,
|
||||||
|
UserPoolId=user_pool_id,
|
||||||
|
Description=description,
|
||||||
|
RoleArn=role_arn,
|
||||||
|
Precedence=precedence,
|
||||||
|
)
|
||||||
|
|
||||||
|
result["Group"]["GroupName"].should.equal(group_name)
|
||||||
|
result["Group"]["UserPoolId"].should.equal(user_pool_id)
|
||||||
|
result["Group"]["Description"].should.equal(description)
|
||||||
|
result["Group"]["RoleArn"].should.equal(role_arn)
|
||||||
|
result["Group"]["Precedence"].should.equal(precedence)
|
||||||
|
result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
|
||||||
|
result["Group"]["CreationDate"].should.be.a("datetime.datetime")
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_create_group_with_duplicate_name_raises_error():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
with assert_raises(ClientError) as cm:
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
cm.exception.operation_name.should.equal('CreateGroup')
|
||||||
|
cm.exception.response['Error']['Code'].should.equal('GroupExistsException')
|
||||||
|
cm.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_get_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result = conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result["Group"]["GroupName"].should.equal(group_name)
|
||||||
|
result["Group"]["UserPoolId"].should.equal(user_pool_id)
|
||||||
|
result["Group"]["LastModifiedDate"].should.be.a("datetime.datetime")
|
||||||
|
result["Group"]["CreationDate"].should.be.a("datetime.datetime")
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_list_groups():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result = conn.list_groups(UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result["Groups"].should.have.length_of(1)
|
||||||
|
result["Groups"][0]["GroupName"].should.equal(group_name)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_delete_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result = conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
|
||||||
|
|
||||||
|
with assert_raises(ClientError) as cm:
|
||||||
|
conn.get_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
cm.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_add_user_to_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
result = conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_add_user_to_group_again_is_noop():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_list_users_in_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
|
||||||
|
result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
|
||||||
|
|
||||||
|
result["Users"].should.have.length_of(1)
|
||||||
|
result["Users"][0]["Username"].should.equal(username)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_list_users_in_group_ignores_deleted_user():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
username2 = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username2)
|
||||||
|
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username2, GroupName=group_name)
|
||||||
|
conn.admin_delete_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
result = conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name)
|
||||||
|
|
||||||
|
result["Users"].should.have.length_of(1)
|
||||||
|
result["Users"][0]["Username"].should.equal(username2)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_list_groups_for_user():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
|
||||||
|
result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result["Groups"].should.have.length_of(1)
|
||||||
|
result["Groups"][0]["GroupName"].should.equal(group_name)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
|
||||||
|
def test_admin_list_groups_for_user_ignores_deleted_group():
|
||||||
|
conn = boto3.client("cognito-idp", "us-west-2")
|
||||||
|
|
||||||
|
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||||
|
group_name = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
group_name2 = str(uuid.uuid4())
|
||||||
|
conn.create_group(GroupName=group_name2, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
username = str(uuid.uuid4())
|
||||||
|
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||||
|
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
|
||||||
|
conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name2)
|
||||||
|
conn.delete_group(GroupName=group_name, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result = conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id)
|
||||||
|
|
||||||
|
result["Groups"].should.have.length_of(1)
|
||||||
|
result["Groups"][0]["GroupName"].should.equal(group_name2)
|
||||||
|
|
||||||
|
|
||||||
|
@mock_cognitoidp
def test_admin_remove_user_from_group():
    conn = boto3.client("cognito-idp", "us-west-2")

    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
    group_name = str(uuid.uuid4())
    conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)

    username = str(uuid.uuid4())
    conn.admin_create_user(UserPoolId=user_pool_id, Username=username)

    conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)

    result = conn.admin_remove_user_from_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
    list(result.keys()).should.equal(["ResponseMetadata"])  # No response expected
    conn.list_users_in_group(UserPoolId=user_pool_id, GroupName=group_name) \
        ["Users"].should.have.length_of(0)
    conn.admin_list_groups_for_user(Username=username, UserPoolId=user_pool_id) \
        ["Groups"].should.have.length_of(0)


@mock_cognitoidp
def test_admin_remove_user_from_group_again_is_noop():
    conn = boto3.client("cognito-idp", "us-west-2")

    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
    group_name = str(uuid.uuid4())
    conn.create_group(GroupName=group_name, UserPoolId=user_pool_id)

    username = str(uuid.uuid4())
    conn.admin_create_user(UserPoolId=user_pool_id, Username=username)

    conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)
    conn.admin_add_user_to_group(UserPoolId=user_pool_id, Username=username, GroupName=group_name)


@mock_cognitoidp
def test_admin_create_user():
    conn = boto3.client("cognito-idp", "us-west-2")

@ -396,6 +903,62 @@ def test_list_users():
    result["Users"][0]["Username"].should.equal(username)


@mock_cognitoidp
def test_list_users_returns_limit_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 users
    user_count = 10
    for i in range(user_count):
        conn.admin_create_user(UserPoolId=user_pool_id,
                               Username=str(uuid.uuid4()))
    max_results = 5
    result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
    result["Users"].should.have.length_of(max_results)
    result.should.have.key("PaginationToken")


@mock_cognitoidp
def test_list_users_returns_pagination_tokens():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 users
    user_count = 10
    for i in range(user_count):
        conn.admin_create_user(UserPoolId=user_pool_id,
                               Username=str(uuid.uuid4()))

    max_results = 5
    result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
    result["Users"].should.have.length_of(max_results)
    result.should.have.key("PaginationToken")

    next_token = result["PaginationToken"]
    result_2 = conn.list_users(UserPoolId=user_pool_id,
                               Limit=max_results, PaginationToken=next_token)
    result_2["Users"].should.have.length_of(max_results)
    result_2.shouldnt.have.key("PaginationToken")


@mock_cognitoidp
def test_list_users_when_limit_more_than_total_items():
    conn = boto3.client("cognito-idp", "us-west-2")
    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]

    # Given 10 users
    user_count = 10
    for i in range(user_count):
        conn.admin_create_user(UserPoolId=user_pool_id,
                               Username=str(uuid.uuid4()))

    max_results = user_count + 5
    result = conn.list_users(UserPoolId=user_pool_id, Limit=max_results)
    result["Users"].should.have.length_of(user_count)
    result.shouldnt.have.key("PaginationToken")


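The three list_users cases above cover a single page, a follow-up page, and an oversized limit. For reference, a minimal sketch of how a caller might drain every page against the same mocked pool — the loop and the collect_all_users name are illustrative, not part of this diff:

def collect_all_users(conn, user_pool_id, page_size=5):
    # Page through list_users until no PaginationToken is returned.
    users, token = [], None
    while True:
        kwargs = {'UserPoolId': user_pool_id, 'Limit': page_size}
        if token:
            kwargs['PaginationToken'] = token
        page = conn.list_users(**kwargs)
        users.extend(page['Users'])
        token = page.get('PaginationToken')
        if not token:
            return users
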
@mock_cognitoidp
def test_admin_disable_user():
    conn = boto3.client("cognito-idp", "us-west-2")

491
tests/test_config/test_config.py
Normal file
@ -0,0 +1,491 @@
from datetime import datetime, timedelta

import boto3
from botocore.exceptions import ClientError
from nose.tools import assert_raises

from moto.config import mock_config


@mock_config
def test_put_configuration_recorder():
    client = boto3.client('config', region_name='us-west-2')

    # Try without a name supplied:
    with assert_raises(ClientError) as ce:
        client.put_configuration_recorder(ConfigurationRecorder={'roleARN': 'somearn'})
    assert ce.exception.response['Error']['Code'] == 'InvalidConfigurationRecorderNameException'
    assert 'is not valid, blank string.' in ce.exception.response['Error']['Message']

    # Try with a really long name:
    with assert_raises(ClientError) as ce:
        client.put_configuration_recorder(ConfigurationRecorder={'name': 'a' * 257, 'roleARN': 'somearn'})
    assert ce.exception.response['Error']['Code'] == 'ValidationException'
    assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message']

    # With resource types and flags set to True:
    bad_groups = [
        {'allSupported': True, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']},
        {'allSupported': False, 'includeGlobalResourceTypes': True, 'resourceTypes': ['item']},
        {'allSupported': True, 'includeGlobalResourceTypes': False, 'resourceTypes': ['item']},
        {'allSupported': False, 'includeGlobalResourceTypes': False, 'resourceTypes': []},
        {'includeGlobalResourceTypes': False, 'resourceTypes': []},
        {'includeGlobalResourceTypes': True},
        {'resourceTypes': []},
        {}
    ]

    for bg in bad_groups:
        with assert_raises(ClientError) as ce:
            client.put_configuration_recorder(ConfigurationRecorder={
                'name': 'default',
                'roleARN': 'somearn',
                'recordingGroup': bg
            })
        assert ce.exception.response['Error']['Code'] == 'InvalidRecordingGroupException'
        assert ce.exception.response['Error']['Message'] == 'The recording group provided is not valid'

    # With an invalid Resource Type:
    with assert_raises(ClientError) as ce:
        client.put_configuration_recorder(ConfigurationRecorder={
            'name': 'default',
            'roleARN': 'somearn',
            'recordingGroup': {
                'allSupported': False,
                'includeGlobalResourceTypes': False,
                # 2 good, and 2 bad:
                'resourceTypes': ['AWS::EC2::Volume', 'LOLNO', 'AWS::EC2::VPC', 'LOLSTILLNO']
            }
        })
    assert ce.exception.response['Error']['Code'] == 'ValidationException'
    assert "2 validation error detected: Value '['LOLNO', 'LOLSTILLNO']" in str(ce.exception.response['Error']['Message'])
    assert 'AWS::EC2::Instance' in ce.exception.response['Error']['Message']

    # Create a proper one:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    result = client.describe_configuration_recorders()['ConfigurationRecorders']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert result[0]['roleARN'] == 'somearn'
    assert not result[0]['recordingGroup']['allSupported']
    assert not result[0]['recordingGroup']['includeGlobalResourceTypes']
    assert len(result[0]['recordingGroup']['resourceTypes']) == 2
    assert 'AWS::EC2::Volume' in result[0]['recordingGroup']['resourceTypes'] \
        and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes']

    # Now update the configuration recorder:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': True,
            'includeGlobalResourceTypes': True
        }
    })
    result = client.describe_configuration_recorders()['ConfigurationRecorders']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert result[0]['roleARN'] == 'somearn'
    assert result[0]['recordingGroup']['allSupported']
    assert result[0]['recordingGroup']['includeGlobalResourceTypes']
    assert len(result[0]['recordingGroup']['resourceTypes']) == 0

    # With a default recording group (i.e. lacking one):
    client.put_configuration_recorder(ConfigurationRecorder={'name': 'testrecorder', 'roleARN': 'somearn'})
    result = client.describe_configuration_recorders()['ConfigurationRecorders']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert result[0]['roleARN'] == 'somearn'
    assert result[0]['recordingGroup']['allSupported']
    assert not result[0]['recordingGroup']['includeGlobalResourceTypes']
    assert not result[0]['recordingGroup'].get('resourceTypes')

    # Can currently only have exactly 1 Config Recorder in an account/region:
    with assert_raises(ClientError) as ce:
        client.put_configuration_recorder(ConfigurationRecorder={
            'name': 'someotherrecorder',
            'roleARN': 'somearn',
            'recordingGroup': {
                'allSupported': False,
                'includeGlobalResourceTypes': False,
            }
        })
    assert ce.exception.response['Error']['Code'] == 'MaxNumberOfConfigurationRecordersExceededException'
    assert "maximum number of configuration recorders: 1 is reached." in ce.exception.response['Error']['Message']


@mock_config
def test_describe_configurations():
    client = boto3.client('config', region_name='us-west-2')

    # Without any configurations:
    result = client.describe_configuration_recorders()
    assert not result['ConfigurationRecorders']

    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    result = client.describe_configuration_recorders()['ConfigurationRecorders']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert result[0]['roleARN'] == 'somearn'
    assert not result[0]['recordingGroup']['allSupported']
    assert not result[0]['recordingGroup']['includeGlobalResourceTypes']
    assert len(result[0]['recordingGroup']['resourceTypes']) == 2
    assert 'AWS::EC2::Volume' in result[0]['recordingGroup']['resourceTypes'] \
        and 'AWS::EC2::VPC' in result[0]['recordingGroup']['resourceTypes']

    # Specify an incorrect name:
    with assert_raises(ClientError) as ce:
        client.describe_configuration_recorders(ConfigurationRecorderNames=['wrong'])
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'
    assert 'wrong' in ce.exception.response['Error']['Message']

    # And with both a good and wrong name:
    with assert_raises(ClientError) as ce:
        client.describe_configuration_recorders(ConfigurationRecorderNames=['testrecorder', 'wrong'])
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'
    assert 'wrong' in ce.exception.response['Error']['Message']


@mock_config
def test_delivery_channels():
    client = boto3.client('config', region_name='us-west-2')

    # Try without a config recorder:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={})
    assert ce.exception.response['Error']['Code'] == 'NoAvailableConfigurationRecorderException'
    assert ce.exception.response['Error']['Message'] == 'Configuration recorder is not available to ' \
                                                        'put delivery channel.'

    # Create a config recorder to continue testing:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Try without a name supplied:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={})
    assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryChannelNameException'
    assert 'is not valid, blank string.' in ce.exception.response['Error']['Message']

    # Try with a really long name:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={'name': 'a' * 257})
    assert ce.exception.response['Error']['Code'] == 'ValidationException'
    assert 'Member must have length less than or equal to 256' in ce.exception.response['Error']['Message']

    # Without specifying a bucket name:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={'name': 'testchannel'})
    assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException'
    assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.'

    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': ''})
    assert ce.exception.response['Error']['Code'] == 'NoSuchBucketException'
    assert ce.exception.response['Error']['Message'] == 'Cannot find a S3 bucket with an empty bucket name.'

    # With an empty string for the S3 key prefix:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={
            'name': 'testchannel', 's3BucketName': 'somebucket', 's3KeyPrefix': ''})
    assert ce.exception.response['Error']['Code'] == 'InvalidS3KeyPrefixException'
    assert 'empty s3 key prefix.' in ce.exception.response['Error']['Message']

    # With an empty string for the SNS ARN:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={
            'name': 'testchannel', 's3BucketName': 'somebucket', 'snsTopicARN': ''})
    assert ce.exception.response['Error']['Code'] == 'InvalidSNSTopicARNException'
    assert 'The sns topic arn' in ce.exception.response['Error']['Message']

    # With an invalid delivery frequency:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={
            'name': 'testchannel',
            's3BucketName': 'somebucket',
            'configSnapshotDeliveryProperties': {'deliveryFrequency': 'WRONG'}
        })
    assert ce.exception.response['Error']['Code'] == 'InvalidDeliveryFrequency'
    assert 'WRONG' in ce.exception.response['Error']['Message']
    assert 'TwentyFour_Hours' in ce.exception.response['Error']['Message']

    # Create a proper one:
    client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'})
    result = client.describe_delivery_channels()['DeliveryChannels']
    assert len(result) == 1
    assert len(result[0].keys()) == 2
    assert result[0]['name'] == 'testchannel'
    assert result[0]['s3BucketName'] == 'somebucket'

    # Overwrite it with another proper configuration:
    client.put_delivery_channel(DeliveryChannel={
        'name': 'testchannel',
        's3BucketName': 'somebucket',
        'snsTopicARN': 'sometopicarn',
        'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'}
    })
    result = client.describe_delivery_channels()['DeliveryChannels']
    assert len(result) == 1
    assert len(result[0].keys()) == 4
    assert result[0]['name'] == 'testchannel'
    assert result[0]['s3BucketName'] == 'somebucket'
    assert result[0]['snsTopicARN'] == 'sometopicarn'
    assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours'

    # Can only have 1:
    with assert_raises(ClientError) as ce:
        client.put_delivery_channel(DeliveryChannel={'name': 'testchannel2', 's3BucketName': 'somebucket'})
    assert ce.exception.response['Error']['Code'] == 'MaxNumberOfDeliveryChannelsExceededException'
    assert 'because the maximum number of delivery channels: 1 is reached.' in ce.exception.response['Error']['Message']


@mock_config
def test_describe_delivery_channels():
    client = boto3.client('config', region_name='us-west-2')
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Without any channels:
    result = client.describe_delivery_channels()
    assert not result['DeliveryChannels']

    client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'})
    result = client.describe_delivery_channels()['DeliveryChannels']
    assert len(result) == 1
    assert len(result[0].keys()) == 2
    assert result[0]['name'] == 'testchannel'
    assert result[0]['s3BucketName'] == 'somebucket'

    # Overwrite it with another proper configuration:
    client.put_delivery_channel(DeliveryChannel={
        'name': 'testchannel',
        's3BucketName': 'somebucket',
        'snsTopicARN': 'sometopicarn',
        'configSnapshotDeliveryProperties': {'deliveryFrequency': 'TwentyFour_Hours'}
    })
    result = client.describe_delivery_channels()['DeliveryChannels']
    assert len(result) == 1
    assert len(result[0].keys()) == 4
    assert result[0]['name'] == 'testchannel'
    assert result[0]['s3BucketName'] == 'somebucket'
    assert result[0]['snsTopicARN'] == 'sometopicarn'
    assert result[0]['configSnapshotDeliveryProperties']['deliveryFrequency'] == 'TwentyFour_Hours'

    # Specify an incorrect name:
    with assert_raises(ClientError) as ce:
        client.describe_delivery_channels(DeliveryChannelNames=['wrong'])
    assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException'
    assert 'wrong' in ce.exception.response['Error']['Message']

    # And with both a good and wrong name:
    with assert_raises(ClientError) as ce:
        client.describe_delivery_channels(DeliveryChannelNames=['testchannel', 'wrong'])
    assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException'
    assert 'wrong' in ce.exception.response['Error']['Message']


@mock_config
def test_start_configuration_recorder():
    client = boto3.client('config', region_name='us-west-2')

    # Without a config recorder:
    with assert_raises(ClientError) as ce:
        client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'

    # Make the config recorder:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Without a delivery channel:
    with assert_raises(ClientError) as ce:
        client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')
    assert ce.exception.response['Error']['Code'] == 'NoAvailableDeliveryChannelException'

    # Make the delivery channel:
    client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'})

    # Start it:
    client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')

    # Verify it's enabled:
    result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus']
    lower_bound = (datetime.utcnow() - timedelta(minutes=5))
    assert result[0]['recording']
    assert result[0]['lastStatus'] == 'PENDING'
    assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow()
    assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow()


@mock_config
def test_stop_configuration_recorder():
    client = boto3.client('config', region_name='us-west-2')

    # Without a config recorder:
    with assert_raises(ClientError) as ce:
        client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder')
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'

    # Make the config recorder:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Make the delivery channel for creation:
    client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'})

    # Start it:
    client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')
    client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder')

    # Verify it's disabled:
    result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus']
    lower_bound = (datetime.utcnow() - timedelta(minutes=5))
    assert not result[0]['recording']
    assert result[0]['lastStatus'] == 'PENDING'
    assert lower_bound < result[0]['lastStartTime'].replace(tzinfo=None) <= datetime.utcnow()
    assert lower_bound < result[0]['lastStopTime'].replace(tzinfo=None) <= datetime.utcnow()
    assert lower_bound < result[0]['lastStatusChangeTime'].replace(tzinfo=None) <= datetime.utcnow()


@mock_config
def test_describe_configuration_recorder_status():
    client = boto3.client('config', region_name='us-west-2')

    # Without any:
    result = client.describe_configuration_recorder_status()
    assert not result['ConfigurationRecordersStatus']

    # Make the config recorder:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Without specifying a config recorder:
    result = client.describe_configuration_recorder_status()['ConfigurationRecordersStatus']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert not result[0]['recording']

    # With a proper name:
    result = client.describe_configuration_recorder_status(
        ConfigurationRecorderNames=['testrecorder'])['ConfigurationRecordersStatus']
    assert len(result) == 1
    assert result[0]['name'] == 'testrecorder'
    assert not result[0]['recording']

    # Invalid name:
    with assert_raises(ClientError) as ce:
        client.describe_configuration_recorder_status(ConfigurationRecorderNames=['testrecorder', 'wrong'])
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'
    assert 'wrong' in ce.exception.response['Error']['Message']


@mock_config
def test_delete_configuration_recorder():
    client = boto3.client('config', region_name='us-west-2')

    # Make the config recorder:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })

    # Delete it:
    client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder')

    # Try again -- it should be deleted:
    with assert_raises(ClientError) as ce:
        client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder')
    assert ce.exception.response['Error']['Code'] == 'NoSuchConfigurationRecorderException'


@mock_config
def test_delete_delivery_channel():
    client = boto3.client('config', region_name='us-west-2')

    # Need a recorder to test the constraint on recording being enabled:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder',
        'roleARN': 'somearn',
        'recordingGroup': {
            'allSupported': False,
            'includeGlobalResourceTypes': False,
            'resourceTypes': ['AWS::EC2::Volume', 'AWS::EC2::VPC']
        }
    })
    client.put_delivery_channel(DeliveryChannel={'name': 'testchannel', 's3BucketName': 'somebucket'})
    client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')

    # With the recorder enabled:
    with assert_raises(ClientError) as ce:
        client.delete_delivery_channel(DeliveryChannelName='testchannel')
    assert ce.exception.response['Error']['Code'] == 'LastDeliveryChannelDeleteFailedException'
    assert 'because there is a running configuration recorder.' in ce.exception.response['Error']['Message']

    # Stop recording:
    client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder')

    # Try again:
    client.delete_delivery_channel(DeliveryChannelName='testchannel')

    # Verify:
    with assert_raises(ClientError) as ce:
        client.delete_delivery_channel(DeliveryChannelName='testchannel')
    assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException'

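Taken together, these tests pin down an ordering constraint: a delivery channel requires a recorder, starting a recorder requires a delivery channel, and a running recorder blocks channel deletion. A minimal sketch of the happy-path lifecycle under the same mock (resource names are placeholders, not part of this diff):

@mock_config
def configure_and_record():
    client = boto3.client('config', region_name='us-west-2')
    # 1. Recorder first, then channel, then start:
    client.put_configuration_recorder(ConfigurationRecorder={
        'name': 'testrecorder', 'roleARN': 'somearn'})
    client.put_delivery_channel(DeliveryChannel={
        'name': 'testchannel', 's3BucketName': 'somebucket'})
    client.start_configuration_recorder(ConfigurationRecorderName='testrecorder')
    # 2. Tear down in reverse: stop before the channel can be deleted.
    client.stop_configuration_recorder(ConfigurationRecorderName='testrecorder')
    client.delete_delivery_channel(DeliveryChannelName='testchannel')
    client.delete_configuration_recorder(ConfigurationRecorderName='testrecorder')
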
@ -815,6 +815,16 @@ def test_scan_filter():
    )
    assert response['Count'] == 1

    response = table.scan(
        FilterExpression=Attr('app').ne('app2')
    )
    assert response['Count'] == 1

    response = table.scan(
        FilterExpression=Attr('app').ne('app1')
    )
    assert response['Count'] == 0


@mock_dynamodb2
def test_scan_filter2():
@ -872,6 +882,26 @@ def test_scan_filter3():
    )
    assert response['Count'] == 1

    response = table.scan(
        FilterExpression=Attr('active').ne(True)
    )
    assert response['Count'] == 0

    response = table.scan(
        FilterExpression=Attr('active').ne(False)
    )
    assert response['Count'] == 1

    response = table.scan(
        FilterExpression=Attr('app').ne(1)
    )
    assert response['Count'] == 0

    response = table.scan(
        FilterExpression=Attr('app').ne(2)
    )
    assert response['Count'] == 1


@mock_dynamodb2
def test_scan_filter4():
@ -919,6 +949,33 @@ def test_bad_scan_filter():
        raise RuntimeError('Should have raised ResourceInUseException')


@mock_dynamodb2
def test_create_table_pay_per_request():
    client = boto3.client('dynamodb', region_name='us-east-1')
    client.create_table(
        TableName='test1',
        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
        BillingMode="PAY_PER_REQUEST"
    )


@mock_dynamodb2
def test_create_table_error_pay_per_request_with_provisioned_param():
    client = boto3.client('dynamodb', region_name='us-east-1')

    try:
        client.create_table(
            TableName='test1',
            AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
            KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
            ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123},
            BillingMode="PAY_PER_REQUEST"
        )
    except ClientError as err:
        err.response['Error']['Code'].should.equal('ValidationException')

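The two tests above encode the BillingMode contract: PAY_PER_REQUEST must be sent without ProvisionedThroughput, while capacity units belong to the default PROVISIONED mode. A hedged sketch of the provisioned-mode equivalent of the same table (same key schema; the capacity values are illustrative):

client.create_table(
    TableName='test1',
    AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'},
                          {'AttributeName': 'app', 'AttributeType': 'S'}],
    KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'},
               {'AttributeName': 'app', 'KeyType': 'RANGE'}],
    BillingMode='PROVISIONED',  # the default; requires the throughput below
    ProvisionedThroughput={'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5}
)
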
@mock_dynamodb2
def test_duplicate_create():
    client = boto3.client('dynamodb', region_name='us-east-1')
@ -1000,6 +1057,11 @@ def test_delete_item():
    response = table.scan()
    assert response['Count'] == 2

    # Test ReturnValues validation
    with assert_raises(ClientError) as ex:
        table.delete_item(Key={'client': 'client1', 'app': 'app1'},
                          ReturnValues='ALL_NEW')

    # Test deletion and returning old value
    response = table.delete_item(Key={'client': 'client1', 'app': 'app1'}, ReturnValues='ALL_OLD')
    response['Attributes'].should.contain('client')
@ -1246,6 +1308,81 @@ def test_update_if_not_exists():
    assert resp['Items'][0]['created_at'] == 123


# https://github.com/spulec/moto/issues/1937
@mock_dynamodb2
def test_update_return_attributes():
    dynamodb = boto3.client('dynamodb', region_name='us-east-1')

    dynamodb.create_table(
        TableName='moto-test',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
    )

    def update(col, to, rv):
        return dynamodb.update_item(
            TableName='moto-test',
            Key={'id': {'S': 'foo'}},
            AttributeUpdates={col: {'Value': {'S': to}, 'Action': 'PUT'}},
            ReturnValues=rv
        )

    r = update('col1', 'val1', 'ALL_NEW')
    assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}

    r = update('col1', 'val2', 'ALL_OLD')
    assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}

    r = update('col2', 'val3', 'UPDATED_NEW')
    assert r['Attributes'] == {'col2': {'S': 'val3'}}

    r = update('col2', 'val4', 'UPDATED_OLD')
    assert r['Attributes'] == {'col2': {'S': 'val3'}}

    r = update('col1', 'val5', 'NONE')
    assert r['Attributes'] == {}

    with assert_raises(ClientError) as ex:
        r = update('col1', 'val6', 'WRONG')


@mock_dynamodb2
def test_put_return_attributes():
    dynamodb = boto3.client('dynamodb', region_name='us-east-1')

    dynamodb.create_table(
        TableName='moto-test',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
    )

    r = dynamodb.put_item(
        TableName='moto-test',
        Item={'id': {'S': 'foo'}, 'col1': {'S': 'val1'}},
        ReturnValues='NONE'
    )
    assert 'Attributes' not in r

    r = dynamodb.put_item(
        TableName='moto-test',
        Item={'id': {'S': 'foo'}, 'col1': {'S': 'val2'}},
        ReturnValues='ALL_OLD'
    )
    assert r['Attributes'] == {'id': {'S': 'foo'}, 'col1': {'S': 'val1'}}

    with assert_raises(ClientError) as ex:
        dynamodb.put_item(
            TableName='moto-test',
            Item={'id': {'S': 'foo'}, 'col1': {'S': 'val3'}},
            ReturnValues='ALL_NEW'
        )
    ex.exception.response['Error']['Code'].should.equal('ValidationException')
    ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
    ex.exception.response['Error']['Message'].should.equal('Return values set to invalid value')


@mock_dynamodb2
def test_query_global_secondary_index_when_created_via_update_table_resource():
    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
@ -1336,3 +1473,236 @@ def test_query_global_secondary_index_when_created_via_update_table_resource():
    assert len(forum_and_subject_items) == 1
    assert forum_and_subject_items[0] == {'user_id': Decimal('1'), 'forum_name': 'cats',
                                          'subject': 'my pet is the cutest'}


@mock_dynamodb2
def test_dynamodb_streams_1():
    conn = boto3.client('dynamodb', region_name='us-east-1')

    resp = conn.create_table(
        TableName='test-streams',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
        StreamSpecification={
            'StreamEnabled': True,
            'StreamViewType': 'NEW_AND_OLD_IMAGES'
        }
    )

    assert 'StreamSpecification' in resp['TableDescription']
    assert resp['TableDescription']['StreamSpecification'] == {
        'StreamEnabled': True,
        'StreamViewType': 'NEW_AND_OLD_IMAGES'
    }
    assert 'LatestStreamLabel' in resp['TableDescription']
    assert 'LatestStreamArn' in resp['TableDescription']

    resp = conn.delete_table(TableName='test-streams')

    assert 'StreamSpecification' in resp['TableDescription']


@mock_dynamodb2
def test_dynamodb_streams_2():
    conn = boto3.client('dynamodb', region_name='us-east-1')

    resp = conn.create_table(
        TableName='test-stream-update',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
    )

    assert 'StreamSpecification' not in resp['TableDescription']

    resp = conn.update_table(
        TableName='test-stream-update',
        StreamSpecification={
            'StreamEnabled': True,
            'StreamViewType': 'NEW_IMAGE'
        }
    )

    assert 'StreamSpecification' in resp['TableDescription']
    assert resp['TableDescription']['StreamSpecification'] == {
        'StreamEnabled': True,
        'StreamViewType': 'NEW_IMAGE'
    }
    assert 'LatestStreamLabel' in resp['TableDescription']
    assert 'LatestStreamArn' in resp['TableDescription']


@mock_dynamodb2
def test_condition_expressions():
    client = boto3.client('dynamodb', region_name='us-east-1')
    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')

    # Create the DynamoDB table.
    client.create_table(
        TableName='test1',
        AttributeDefinitions=[{'AttributeName': 'client', 'AttributeType': 'S'}, {'AttributeName': 'app', 'AttributeType': 'S'}],
        KeySchema=[{'AttributeName': 'client', 'KeyType': 'HASH'}, {'AttributeName': 'app', 'KeyType': 'RANGE'}],
        ProvisionedThroughput={'ReadCapacityUnits': 123, 'WriteCapacityUnits': 123}
    )
    client.put_item(
        TableName='test1',
        Item={
            'client': {'S': 'client1'},
            'app': {'S': 'app1'},
            'match': {'S': 'match'},
            'existing': {'S': 'existing'},
        }
    )

    client.put_item(
        TableName='test1',
        Item={
            'client': {'S': 'client1'},
            'app': {'S': 'app1'},
            'match': {'S': 'match'},
            'existing': {'S': 'existing'},
        },
        ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match',
        ExpressionAttributeNames={
            '#existing': 'existing',
            '#nonexistent': 'nope',
            '#match': 'match',
        },
        ExpressionAttributeValues={
            ':match': {'S': 'match'}
        }
    )

    client.put_item(
        TableName='test1',
        Item={
            'client': {'S': 'client1'},
            'app': {'S': 'app1'},
            'match': {'S': 'match'},
            'existing': {'S': 'existing'},
        },
        ConditionExpression='NOT(attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2))',
        ExpressionAttributeNames={
            '#nonexistent1': 'nope',
            '#nonexistent2': 'nope2'
        }
    )

    with assert_raises(client.exceptions.ConditionalCheckFailedException):
        client.put_item(
            TableName='test1',
            Item={
                'client': {'S': 'client1'},
                'app': {'S': 'app1'},
                'match': {'S': 'match'},
                'existing': {'S': 'existing'},
            },
            ConditionExpression='attribute_exists(#nonexistent1) AND attribute_exists(#nonexistent2)',
            ExpressionAttributeNames={
                '#nonexistent1': 'nope',
                '#nonexistent2': 'nope2'
            }
        )

    with assert_raises(client.exceptions.ConditionalCheckFailedException):
        client.put_item(
            TableName='test1',
            Item={
                'client': {'S': 'client1'},
                'app': {'S': 'app1'},
                'match': {'S': 'match'},
                'existing': {'S': 'existing'},
            },
            ConditionExpression='NOT(attribute_not_exists(#nonexistent1) AND attribute_not_exists(#nonexistent2))',
            ExpressionAttributeNames={
                '#nonexistent1': 'nope',
                '#nonexistent2': 'nope2'
            }
        )

    with assert_raises(client.exceptions.ConditionalCheckFailedException):
        client.put_item(
            TableName='test1',
            Item={
                'client': {'S': 'client1'},
                'app': {'S': 'app1'},
                'match': {'S': 'match'},
                'existing': {'S': 'existing'},
            },
            ConditionExpression='attribute_exists(#existing) AND attribute_not_exists(#nonexistent) AND #match = :match',
            ExpressionAttributeNames={
                '#existing': 'existing',
                '#nonexistent': 'nope',
                '#match': 'match',
            },
            ExpressionAttributeValues={
                ':match': {'S': 'match2'}
            }
        )

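test_condition_expressions exercises attribute_exists/attribute_not_exists and an equality check together. One common application of the same mechanism, sketched here for illustration only (key names match the table above), is an idempotent create that writes only when no item with that key exists yet:

try:
    client.put_item(
        TableName='test1',
        Item={'client': {'S': 'client1'}, 'app': {'S': 'app2'}},
        # Only insert if no item with this full key exists yet:
        ConditionExpression='attribute_not_exists(#c) AND attribute_not_exists(#a)',
        ExpressionAttributeNames={'#c': 'client', '#a': 'app'}
    )
except client.exceptions.ConditionalCheckFailedException:
    pass  # item already existed; treat the put as a no-op
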
@mock_dynamodb2
def test_query_gsi_with_range_key():
    dynamodb = boto3.client('dynamodb', region_name='us-east-1')
    dynamodb.create_table(
        TableName='test',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[
            {'AttributeName': 'id', 'AttributeType': 'S'},
            {'AttributeName': 'gsi_hash_key', 'AttributeType': 'S'},
            {'AttributeName': 'gsi_range_key', 'AttributeType': 'S'}
        ],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
        GlobalSecondaryIndexes=[
            {
                'IndexName': 'test_gsi',
                'KeySchema': [
                    {
                        'AttributeName': 'gsi_hash_key',
                        'KeyType': 'HASH'
                    },
                    {
                        'AttributeName': 'gsi_range_key',
                        'KeyType': 'RANGE'
                    },
                ],
                'Projection': {
                    'ProjectionType': 'ALL',
                },
                'ProvisionedThroughput': {
                    'ReadCapacityUnits': 1,
                    'WriteCapacityUnits': 1
                }
            },
        ]
    )

    dynamodb.put_item(
        TableName='test',
        Item={
            'id': {'S': 'test1'},
            'gsi_hash_key': {'S': 'key1'},
            'gsi_range_key': {'S': 'range1'},
        }
    )
    dynamodb.put_item(
        TableName='test',
        Item={
            'id': {'S': 'test2'},
            'gsi_hash_key': {'S': 'key1'},
        }
    )

    res = dynamodb.query(TableName='test', IndexName='test_gsi',
                         KeyConditionExpression='gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key',
                         ExpressionAttributeValues={
                             ':gsi_hash_key': {'S': 'key1'},
                             ':gsi_range_key': {'S': 'range1'}
                         })
    res.should.have.key("Count").equal(1)
    res.should.have.key("Items")
    res['Items'][0].should.equal({
        'id': {'S': 'test1'},
        'gsi_hash_key': {'S': 'key1'},
        'gsi_range_key': {'S': 'range1'},
    })

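For contrast, a hedged sketch of the hash-only query against the same index (illustrative, not part of the diff). Note that in DynamoDB proper a GSI is sparse: items missing any index key attribute, like 'test2' above, are not projected into it, so the result set here depends on that rule:

res = dynamodb.query(TableName='test', IndexName='test_gsi',
                     KeyConditionExpression='gsi_hash_key = :h',
                     ExpressionAttributeValues={':h': {'S': 'key1'}})
# Only the gsi_hash_key is constrained; whether 'test2' appears depends on
# the sparse-index behaviour of the backend being tested.
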
@ -750,6 +750,47 @@ def test_boto3_update_item_conditions_pass_because_expect_exists_by_compare_to_n
    returned_item = table.get_item(Key={'username': 'johndoe'})
    assert dict(returned_item)['Item']['foo'].should.equal("baz")


@mock_dynamodb2
def test_boto3_update_settype_item_with_conditions():
    class OrderedSet(set):
        """A set with predictable iteration order"""
        def __init__(self, values):
            super(OrderedSet, self).__init__(values)
            self.__ordered_values = values

        def __iter__(self):
            return iter(self.__ordered_values)

    table = _create_user_table()
    table.put_item(Item={'username': 'johndoe'})
    table.update_item(
        Key={'username': 'johndoe'},
        UpdateExpression='SET foo=:new_value',
        ExpressionAttributeValues={
            ':new_value': OrderedSet(['hello', 'world']),
        },
    )

    table.update_item(
        Key={'username': 'johndoe'},
        UpdateExpression='SET foo=:new_value',
        ExpressionAttributeValues={
            ':new_value': set(['baz']),
        },
        Expected={
            'foo': {
                'ComparisonOperator': 'EQ',
                'AttributeValueList': [
                    OrderedSet(['world', 'hello']),  # Opposite order to original
                ],
            }
        },
    )
    returned_item = table.get_item(Key={'username': 'johndoe'})
    assert dict(returned_item)['Item']['foo'].should.equal(set(['baz']))


@mock_dynamodb2
def test_boto3_put_item_conditions_pass():
    table = _create_user_table()

234
tests/test_dynamodbstreams/test_dynamodbstreams.py
Normal file
@ -0,0 +1,234 @@
from __future__ import unicode_literals, print_function

from nose.tools import assert_raises

import boto3
from moto import mock_dynamodb2, mock_dynamodbstreams


class TestCore():
    stream_arn = None
    mocks = []

    def setup(self):
        self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
        for m in self.mocks:
            m.start()

        # create a table with a stream
        conn = boto3.client('dynamodb', region_name='us-east-1')

        resp = conn.create_table(
            TableName='test-streams',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id',
                                   'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1,
                                   'WriteCapacityUnits': 1},
            StreamSpecification={
                'StreamEnabled': True,
                'StreamViewType': 'NEW_AND_OLD_IMAGES'
            }
        )
        self.stream_arn = resp['TableDescription']['LatestStreamArn']

    def teardown(self):
        conn = boto3.client('dynamodb', region_name='us-east-1')
        conn.delete_table(TableName='test-streams')
        self.stream_arn = None

        for m in self.mocks:
            m.stop()

    def test_verify_stream(self):
        conn = boto3.client('dynamodb', region_name='us-east-1')
        resp = conn.describe_table(TableName='test-streams')
        assert 'LatestStreamArn' in resp['Table']

    def test_describe_stream(self):
        conn = boto3.client('dynamodbstreams', region_name='us-east-1')

        resp = conn.describe_stream(StreamArn=self.stream_arn)
        assert 'StreamDescription' in resp
        desc = resp['StreamDescription']
        assert desc['StreamArn'] == self.stream_arn
        assert desc['TableName'] == 'test-streams'

    def test_list_streams(self):
        conn = boto3.client('dynamodbstreams', region_name='us-east-1')

        resp = conn.list_streams()
        assert resp['Streams'][0]['StreamArn'] == self.stream_arn

        resp = conn.list_streams(TableName='no-stream')
        assert not resp['Streams']

    def test_get_shard_iterator(self):
        conn = boto3.client('dynamodbstreams', region_name='us-east-1')

        resp = conn.describe_stream(StreamArn=self.stream_arn)
        shard_id = resp['StreamDescription']['Shards'][0]['ShardId']

        resp = conn.get_shard_iterator(
            StreamArn=self.stream_arn,
            ShardId=shard_id,
            ShardIteratorType='TRIM_HORIZON'
        )
        assert 'ShardIterator' in resp

    def test_get_records_empty(self):
        conn = boto3.client('dynamodbstreams', region_name='us-east-1')

        resp = conn.describe_stream(StreamArn=self.stream_arn)
        shard_id = resp['StreamDescription']['Shards'][0]['ShardId']

        resp = conn.get_shard_iterator(
            StreamArn=self.stream_arn,
            ShardId=shard_id,
            ShardIteratorType='LATEST'
        )
        iterator_id = resp['ShardIterator']

        resp = conn.get_records(ShardIterator=iterator_id)
        assert 'Records' in resp
        assert len(resp['Records']) == 0

    def test_get_records_seq(self):
        conn = boto3.client('dynamodb', region_name='us-east-1')

        conn.put_item(
            TableName='test-streams',
            Item={
                'id': {'S': 'entry1'},
                'first_col': {'S': 'foo'}
            }
        )
        conn.put_item(
            TableName='test-streams',
            Item={
                'id': {'S': 'entry1'},
                'first_col': {'S': 'bar'},
                'second_col': {'S': 'baz'}
            }
        )
        conn.delete_item(
            TableName='test-streams',
            Key={'id': {'S': 'entry1'}}
        )

        conn = boto3.client('dynamodbstreams', region_name='us-east-1')

        resp = conn.describe_stream(StreamArn=self.stream_arn)
        shard_id = resp['StreamDescription']['Shards'][0]['ShardId']

        resp = conn.get_shard_iterator(
            StreamArn=self.stream_arn,
            ShardId=shard_id,
            ShardIteratorType='TRIM_HORIZON'
        )
        iterator_id = resp['ShardIterator']

        resp = conn.get_records(ShardIterator=iterator_id)
        assert len(resp['Records']) == 3
        assert resp['Records'][0]['eventName'] == 'INSERT'
        assert resp['Records'][1]['eventName'] == 'MODIFY'
        assert resp['Records'][2]['eventName'] == 'DELETE'

        # now try fetching from the next shard iterator, it should be
        # empty
        resp = conn.get_records(ShardIterator=resp['NextShardIterator'])
        assert len(resp['Records']) == 0


class TestEdges():
    mocks = []

    def setup(self):
        self.mocks = [mock_dynamodb2(), mock_dynamodbstreams()]
        for m in self.mocks:
            m.start()

    def teardown(self):
        for m in self.mocks:
            m.stop()

    def test_enable_stream_on_table(self):
        conn = boto3.client('dynamodb', region_name='us-east-1')
        resp = conn.create_table(
            TableName='test-streams',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id',
                                   'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1,
                                   'WriteCapacityUnits': 1}
        )
        assert 'StreamSpecification' not in resp['TableDescription']

        resp = conn.update_table(
            TableName='test-streams',
            StreamSpecification={
                'StreamViewType': 'KEYS_ONLY'
            }
        )
        assert 'StreamSpecification' in resp['TableDescription']
        assert resp['TableDescription']['StreamSpecification'] == {
            'StreamEnabled': True,
            'StreamViewType': 'KEYS_ONLY'
        }
        assert 'LatestStreamLabel' in resp['TableDescription']

        # now try to enable it again
        with assert_raises(conn.exceptions.ResourceInUseException):
            resp = conn.update_table(
                TableName='test-streams',
                StreamSpecification={
                    'StreamViewType': 'OLD_IMAGES'
                }
            )

    def test_stream_with_range_key(self):
        dyn = boto3.client('dynamodb', region_name='us-east-1')

        resp = dyn.create_table(
            TableName='test-streams',
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'},
                       {'AttributeName': 'color', 'KeyType': 'RANGE'}],
            AttributeDefinitions=[{'AttributeName': 'id',
                                   'AttributeType': 'S'},
                                  {'AttributeName': 'color',
                                   'AttributeType': 'S'}],
            ProvisionedThroughput={'ReadCapacityUnits': 1,
                                   'WriteCapacityUnits': 1},
            StreamSpecification={
                'StreamViewType': 'NEW_IMAGES'
            }
        )
        stream_arn = resp['TableDescription']['LatestStreamArn']

        streams = boto3.client('dynamodbstreams', region_name='us-east-1')
        resp = streams.describe_stream(StreamArn=stream_arn)
        shard_id = resp['StreamDescription']['Shards'][0]['ShardId']

        resp = streams.get_shard_iterator(
            StreamArn=stream_arn,
            ShardId=shard_id,
            ShardIteratorType='LATEST'
        )
        iterator_id = resp['ShardIterator']

        dyn.put_item(
            TableName='test-streams',
            Item={'id': {'S': 'row1'}, 'color': {'S': 'blue'}}
        )
        dyn.put_item(
            TableName='test-streams',
            Item={'id': {'S': 'row2'}, 'color': {'S': 'green'}}
        )

        resp = streams.get_records(ShardIterator=iterator_id)
        assert len(resp['Records']) == 2
        assert resp['Records'][0]['eventName'] == 'INSERT'
        assert resp['Records'][1]['eventName'] == 'INSERT'

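The TestCore and TestEdges cases above create tables with streams, walk a shard with TRIM_HORIZON/LATEST iterators, and follow NextShardIterator. A compact sketch of that read loop as a consumer might write it (single-shard assumption, as in these mocked tables; the read_stream name is illustrative):

def read_stream(streams, stream_arn):
    # Describe the stream, take its first shard, and drain the records.
    desc = streams.describe_stream(StreamArn=stream_arn)['StreamDescription']
    shard_id = desc['Shards'][0]['ShardId']
    iterator = streams.get_shard_iterator(
        StreamArn=stream_arn, ShardId=shard_id,
        ShardIteratorType='TRIM_HORIZON')['ShardIterator']
    records = []
    while iterator:
        resp = streams.get_records(ShardIterator=iterator)
        records.extend(resp['Records'])
        if not resp['Records']:
            break  # caught up; a real consumer would keep polling
        iterator = resp.get('NextShardIterator')
    return records
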
@ -258,11 +258,11 @@ def test_ami_filters():
    amis_by_name = conn.get_all_images(filters={'name': imageA.name})
    set([ami.id for ami in amis_by_name]).should.equal(set([imageA.id]))

-    amis_by_public = conn.get_all_images(filters={'is-public': True})
+    amis_by_public = conn.get_all_images(filters={'is-public': 'true'})
    set([ami.id for ami in amis_by_public]).should.contain(imageB.id)
    len(amis_by_public).should.equal(35)

-    amis_by_nonpublic = conn.get_all_images(filters={'is-public': False})
+    amis_by_nonpublic = conn.get_all_images(filters={'is-public': 'false'})
    set([ami.id for ami in amis_by_nonpublic]).should.contain(imageA.id)
    len(amis_by_nonpublic).should.equal(1)

@@ -589,6 +589,18 @@ def test_volume_tag_escaping():
     dict(snaps[0].tags).should.equal({'key': '</closed>'})


+@mock_ec2
+def test_volume_property_hidden_when_no_tags_exist():
+    ec2_client = boto3.client('ec2', region_name='us-east-1')
+
+    volume_response = ec2_client.create_volume(
+        Size=10,
+        AvailabilityZone='us-east-1a'
+    )
+
+    volume_response.get('Tags').should.equal(None)
+
+
 @freeze_time
 @mock_ec2
 def test_copy_snapshot():
@@ -36,7 +36,8 @@ def test_elastic_network_interfaces():
     all_enis.should.have.length_of(1)
     eni = all_enis[0]
     eni.groups.should.have.length_of(0)
-    eni.private_ip_addresses.should.have.length_of(0)
+    eni.private_ip_addresses.should.have.length_of(1)
+    eni.private_ip_addresses[0].private_ip_address.startswith('10.').should.be.true

     with assert_raises(EC2ResponseError) as ex:
         conn.delete_network_interface(eni.id, dry_run=True)
@@ -354,9 +355,13 @@ def test_elastic_network_interfaces_cloudformation():
     )
     ec2_conn = boto.ec2.connect_to_region("us-west-1")
     eni = ec2_conn.get_all_network_interfaces()[0]
+    eni.private_ip_addresses.should.have.length_of(1)
+
     stack = conn.describe_stacks()[0]
     resources = stack.describe_resources()
     cfn_eni = [resource for resource in resources if resource.resource_type ==
                'AWS::EC2::NetworkInterface'][0]
     cfn_eni.physical_resource_id.should.equal(eni.id)
+
+    outputs = {output.key: output.value for output in stack.outputs}
+    outputs['ENIIpAddress'].should.equal(eni.private_ip_addresses[0].private_ip_address)
@@ -1254,3 +1254,18 @@ def test_create_instance_ebs_optimized():
     )
     instance.load()
     instance.ebs_optimized.should.be(False)
+
+
+@mock_ec2
+def test_run_multiple_instances_in_same_command():
+    instance_count = 4
+    client = boto3.client('ec2', region_name='us-east-1')
+    client.run_instances(ImageId='ami-1234abcd',
+                         MinCount=instance_count,
+                         MaxCount=instance_count)
+    reservations = client.describe_instances()['Reservations']
+
+    reservations[0]['Instances'].should.have.length_of(instance_count)
+
+    instances = reservations[0]['Instances']
+    for i in range(0, instance_count):
+        instances[i]['AmiLaunchIndex'].should.be(i)
@@ -1,8 +1,9 @@
 from __future__ import unicode_literals
 import boto
+import boto3
 import sure  # noqa

-from moto import mock_ec2_deprecated
+from moto import mock_ec2_deprecated, mock_ec2


 @mock_ec2_deprecated
@@ -173,3 +174,43 @@ def test_network_acl_tagging():
                             if na.id == network_acl.id)
     test_network_acl.tags.should.have.length_of(1)
     test_network_acl.tags["a key"].should.equal("some value")
+
+
+@mock_ec2
+def test_new_subnet_in_new_vpc_associates_with_default_network_acl():
+    ec2 = boto3.resource('ec2', region_name='us-west-1')
+    new_vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
+    new_vpc.reload()
+
+    subnet = ec2.create_subnet(VpcId=new_vpc.id, CidrBlock='10.0.0.0/24')
+    subnet.reload()
+
+    new_vpcs_default_network_acl = next(iter(new_vpc.network_acls.all()), None)
+    new_vpcs_default_network_acl.reload()
+    new_vpcs_default_network_acl.vpc_id.should.equal(new_vpc.id)
+    new_vpcs_default_network_acl.associations.should.have.length_of(1)
+    new_vpcs_default_network_acl.associations[0]['SubnetId'].should.equal(subnet.id)
+
+
+@mock_ec2
+def test_default_network_acl_default_entries():
+    ec2 = boto3.resource('ec2', region_name='us-west-1')
+    default_network_acl = next(iter(ec2.network_acls.all()), None)
+    default_network_acl.is_default.should.be.ok
+
+    default_network_acl.entries.should.have.length_of(4)
+    unique_entries = []
+    for entry in default_network_acl.entries:
+        entry['CidrBlock'].should.equal('0.0.0.0/0')
+        entry['Protocol'].should.equal('-1')
+        entry['RuleNumber'].should.be.within([100, 32767])
+        entry['RuleAction'].should.be.within(['allow', 'deny'])
+        assert type(entry['Egress']) is bool
+        if entry['RuleAction'] == 'allow':
+            entry['RuleNumber'].should.be.equal(100)
+        else:
+            entry['RuleNumber'].should.be.equal(32767)
+        if entry not in unique_entries:
+            unique_entries.append(entry)
+
+    unique_entries.should.have.length_of(4)
@@ -54,7 +54,7 @@ def spot_config(subnet_id, allocation_strategy="lowestPrice"):
         },
         'EbsOptimized': False,
         'WeightedCapacity': 2.0,
-        'SpotPrice': '0.13'
+        'SpotPrice': '0.13',
     }, {
         'ImageId': 'ami-123',
         'KeyName': 'my-key',
@@ -148,6 +148,48 @@ def test_create_diversified_spot_fleet():
     instances[0]['InstanceId'].should.contain("i-")


+@mock_ec2
+def test_create_spot_fleet_request_with_tag_spec():
+    conn = boto3.client("ec2", region_name='us-west-2')
+    subnet_id = get_subnet_id(conn)
+
+    tag_spec = [
+        {
+            'ResourceType': 'instance',
+            'Tags': [
+                {
+                    'Key': 'tag-1',
+                    'Value': 'foo',
+                },
+                {
+                    'Key': 'tag-2',
+                    'Value': 'bar',
+                },
+            ]
+        },
+    ]
+    config = spot_config(subnet_id)
+    config['LaunchSpecifications'][0]['TagSpecifications'] = tag_spec
+    spot_fleet_res = conn.request_spot_fleet(
+        SpotFleetRequestConfig=config
+    )
+    spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
+    spot_fleet_requests = conn.describe_spot_fleet_requests(
+        SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
+    spot_fleet_config = spot_fleet_requests[0]['SpotFleetRequestConfig']
+    spot_fleet_config['LaunchSpecifications'][0]['TagSpecifications'][0][
+        'ResourceType'].should.equal('instance')
+    for tag in tag_spec[0]['Tags']:
+        spot_fleet_config['LaunchSpecifications'][0]['TagSpecifications'][0]['Tags'].should.contain(tag)
+
+    instance_res = conn.describe_spot_fleet_instances(
+        SpotFleetRequestId=spot_fleet_id)
+    instances = conn.describe_instances(InstanceIds=[i['InstanceId'] for i in instance_res['ActiveInstances']])
+    for instance in instances['Reservations'][0]['Instances']:
+        for tag in tag_spec[0]['Tags']:
+            instance['Tags'].should.contain(tag)
+
+
 @mock_ec2
 def test_cancel_spot_fleet_request():
     conn = boto3.client("ec2", region_name='us-west-2')
@@ -5,6 +5,7 @@ import itertools

 import boto
 import boto3
+from botocore.exceptions import ClientError
 from boto.exception import EC2ResponseError
 from boto.ec2.instance import Reservation
 import sure  # noqa
@@ -451,3 +452,31 @@ def test_create_snapshot_with_tags():
     }]

     assert snapshot['Tags'] == expected_tags
+
+
+@mock_ec2
+def test_create_tag_empty_resource():
+    # create ec2 client in us-west-1
+    client = boto3.client('ec2', region_name='us-west-1')
+    # create tag with empty resource
+    with assert_raises(ClientError) as ex:
+        client.create_tags(
+            Resources=[],
+            Tags=[{'Key': 'Value'}]
+        )
+    ex.exception.response['Error']['Code'].should.equal('MissingParameter')
+    ex.exception.response['Error']['Message'].should.equal('The request must contain the parameter resourceIdSet')
+
+
+@mock_ec2
+def test_delete_tag_empty_resource():
+    # create ec2 client in us-west-1
+    client = boto3.client('ec2', region_name='us-west-1')
+    # delete tag with empty resource
+    with assert_raises(ClientError) as ex:
+        client.delete_tags(
+            Resources=[],
+            Tags=[{'Key': 'Value'}]
+        )
+    ex.exception.response['Error']['Code'].should.equal('MissingParameter')
+    ex.exception.response['Error']['Message'].should.equal('The request must contain the parameter resourceIdSet')
@@ -89,7 +89,8 @@ def test_vpc_peering_connections_delete():
     verdict.should.equal(True)

     all_vpc_pcxs = conn.get_all_vpc_peering_connections()
-    all_vpc_pcxs.should.have.length_of(0)
+    all_vpc_pcxs.should.have.length_of(1)
+    all_vpc_pcxs[0]._status.code.should.equal('deleted')

     with assert_raises(EC2ResponseError) as cm:
         conn.delete_vpc_peering_connection("pcx-1234abcd")
@@ -47,6 +47,15 @@ def test_list_clusters():
         'arn:aws:ecs:us-east-1:012345678910:cluster/test_cluster1')


+@mock_ecs
+def test_describe_clusters():
+    client = boto3.client('ecs', region_name='us-east-1')
+    response = client.describe_clusters(clusters=["some-cluster"])
+    response['failures'].should.contain({
+        'arn': 'arn:aws:ecs:us-east-1:012345678910:cluster/some-cluster',
+        'reason': 'MISSING'
+    })
+
+
 @mock_ecs
 def test_delete_cluster():
     client = boto3.client('ecs', region_name='us-east-1')
@@ -379,23 +388,32 @@ def test_list_services():
         cluster='test_ecs_cluster',
         serviceName='test_ecs_service1',
         taskDefinition='test_ecs_task',
+        schedulingStrategy='REPLICA',
         desiredCount=2
     )
     _ = client.create_service(
         cluster='test_ecs_cluster',
         serviceName='test_ecs_service2',
         taskDefinition='test_ecs_task',
+        schedulingStrategy='DAEMON',
         desiredCount=2
     )
-    response = client.list_services(
+    unfiltered_response = client.list_services(
         cluster='test_ecs_cluster'
     )
-    len(response['serviceArns']).should.equal(2)
+    len(unfiltered_response['serviceArns']).should.equal(2)
-    response['serviceArns'][0].should.equal(
+    unfiltered_response['serviceArns'][0].should.equal(
         'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1')
-    response['serviceArns'][1].should.equal(
+    unfiltered_response['serviceArns'][1].should.equal(
         'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2')
+
+    filtered_response = client.list_services(
+        cluster='test_ecs_cluster',
+        schedulingStrategy='REPLICA'
+    )
+    len(filtered_response['serviceArns']).should.equal(1)
+    filtered_response['serviceArns'][0].should.equal(
+        'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1')


 @mock_ecs
 def test_describe_services():
@@ -925,6 +943,65 @@ def test_update_container_instances_state():
                                                 status='test_status').should.throw(Exception)


+@mock_ec2
+@mock_ecs
+def test_update_container_instances_state_by_arn():
+    ecs_client = boto3.client('ecs', region_name='us-east-1')
+    ec2 = boto3.resource('ec2', region_name='us-east-1')
+
+    test_cluster_name = 'test_ecs_cluster'
+    _ = ecs_client.create_cluster(
+        clusterName=test_cluster_name
+    )
+
+    instance_to_create = 3
+    test_instance_arns = []
+    for i in range(0, instance_to_create):
+        test_instance = ec2.create_instances(
+            ImageId="ami-1234abcd",
+            MinCount=1,
+            MaxCount=1,
+        )[0]
+
+        instance_id_document = json.dumps(
+            ec2_utils.generate_instance_identity_document(test_instance)
+        )
+
+        response = ecs_client.register_container_instance(
+            cluster=test_cluster_name,
+            instanceIdentityDocument=instance_id_document)
+
+        test_instance_arns.append(response['containerInstance']['containerInstanceArn'])
+
+    response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+                                                           containerInstances=test_instance_arns,
+                                                           status='DRAINING')
+    len(response['failures']).should.equal(0)
+    len(response['containerInstances']).should.equal(instance_to_create)
+    response_statuses = [ci['status'] for ci in response['containerInstances']]
+    for status in response_statuses:
+        status.should.equal('DRAINING')
+    response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+                                                           containerInstances=test_instance_arns,
+                                                           status='DRAINING')
+    len(response['failures']).should.equal(0)
+    len(response['containerInstances']).should.equal(instance_to_create)
+    response_statuses = [ci['status'] for ci in response['containerInstances']]
+    for status in response_statuses:
+        status.should.equal('DRAINING')
+    response = ecs_client.update_container_instances_state(cluster=test_cluster_name,
+                                                           containerInstances=test_instance_arns,
+                                                           status='ACTIVE')
+    len(response['failures']).should.equal(0)
+    len(response['containerInstances']).should.equal(instance_to_create)
+    response_statuses = [ci['status'] for ci in response['containerInstances']]
+    for status in response_statuses:
+        status.should.equal('ACTIVE')
+    ecs_client.update_container_instances_state.when.called_with(cluster=test_cluster_name,
+                                                                 containerInstances=test_instance_arns,
+                                                                 status='test_status').should.throw(Exception)
+
+
 @mock_ec2
 @mock_ecs
 def test_run_task():
@@ -1,5 +1,4 @@
 import random
-
 import boto3
 import json

@@ -7,7 +6,6 @@ from moto.events import mock_events
 from botocore.exceptions import ClientError
 from nose.tools import assert_raises
-

 RULES = [
     {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'},
     {'Name': 'test2', 'ScheduleExpression': 'rate(1 minute)'},
@@ -109,6 +107,13 @@ def test_enable_disable_rule():
     rule = client.describe_rule(Name=rule_name)
     assert(rule['State'] == 'ENABLED')

+    # Test invalid name
+    try:
+        client.enable_rule(Name='junk')
+
+    except ClientError as ce:
+        assert ce.response['Error']['Code'] == 'ResourceNotFoundException'
+

 @mock_events
 def test_list_rule_names_by_target():
@@ -3,7 +3,9 @@ import base64

 import boto
 import boto3
+import os
 import sure  # noqa
+import sys
 from boto.exception import BotoServerError
 from botocore.exceptions import ClientError
 from moto import mock_iam, mock_iam_deprecated
@@ -11,9 +13,23 @@ from moto.iam.models import aws_managed_policies
 from nose.tools import assert_raises, assert_equals
 from nose.tools import raises

+from datetime import datetime
 from tests.helpers import requires_boto_gte


+MOCK_CERT = """-----BEGIN CERTIFICATE-----
+MIIBpzCCARACCQCY5yOdxCTrGjANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQKDAxt
+b3RvIHRlc3RpbmcwIBcNMTgxMTA1MTkwNTIwWhgPMjI5MjA4MTkxOTA1MjBaMBcx
+FTATBgNVBAoMDG1vdG8gdGVzdGluZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC
+gYEA1Jn3g2h7LD3FLqdpcYNbFXCS4V4eDpuTCje9vKFcC3pi/01147X3zdfPy8Mt
+ZhKxcREOwm4NXykh23P9KW7fBovpNwnbYsbPqj8Hf1ZaClrgku1arTVhEnKjx8zO
+vaR/bVLCss4uE0E0VM1tJn/QGQsfthFsjuHtwx8uIWz35tUCAwEAATANBgkqhkiG
+9w0BAQsFAAOBgQBWdOQ7bDc2nWkUhFjZoNIZrqjyNdjlMUndpwREVD7FQ/DuxJMj
+FyDHrtlrS80dPUQWNYHw++oACDpWO01LGLPPrGmuO/7cOdojPEd852q5gd+7W9xt
+8vUH+pBa6IBLbvBp+szli51V3TLSWcoyy4ceJNQU2vCkTLoFdS0RLd/7tQ==
+-----END CERTIFICATE-----"""
+
+
 @mock_iam_deprecated()
 def test_get_all_server_certs():
     conn = boto.connect_iam()
@@ -108,6 +124,10 @@ def test_create_role_and_instance_profile():

     conn.list_roles().roles[0].role_name.should.equal('my-role')

+    # Test with an empty path:
+    profile = conn.create_instance_profile('my-other-profile')
+    profile.path.should.equal('/')
+

 @mock_iam_deprecated()
 def test_remove_role_from_instance_profile():
@@ -283,8 +303,18 @@ def test_create_policy_versions():
         PolicyDocument='{"some":"policy"}')
     version = conn.create_policy_version(
         PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
-        PolicyDocument='{"some":"policy"}')
+        PolicyDocument='{"some":"policy"}',
+        SetAsDefault=True)
     version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'})
+    version.get('PolicyVersion').get('VersionId').should.equal("v2")
+    conn.delete_policy_version(
+        PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
+        VersionId="v1")
+    version = conn.create_policy_version(
+        PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
+        PolicyDocument='{"some":"policy"}')
+    version.get('PolicyVersion').get('VersionId').should.equal("v3")


 @mock_iam
 def test_get_policy():
@@ -380,6 +410,19 @@ def test_get_user():
         conn.get_user('my-user')


+@mock_iam()
+def test_update_user():
+    conn = boto3.client('iam', region_name='us-east-1')
+    with assert_raises(conn.exceptions.NoSuchEntityException):
+        conn.update_user(UserName='my-user')
+    conn.create_user(UserName='my-user')
+    conn.update_user(UserName='my-user', NewPath='/new-path/', NewUserName='new-user')
+    response = conn.get_user(UserName='new-user')
+    response['User'].get('Path').should.equal('/new-path/')
+    with assert_raises(conn.exceptions.NoSuchEntityException):
+        conn.get_user(UserName='my-user')
+
+
 @mock_iam_deprecated()
 def test_get_current_user():
     """If no user is specific, IAM returns the current user"""
@@ -536,6 +579,14 @@ def test_generate_credential_report():
     result['generate_credential_report_response'][
         'generate_credential_report_result']['state'].should.equal('COMPLETE')

+
+@mock_iam
+def test_boto3_generate_credential_report():
+    conn = boto3.client('iam', region_name='us-east-1')
+    result = conn.generate_credential_report()
+    result['State'].should.equal('STARTED')
+    result = conn.generate_credential_report()
+    result['State'].should.equal('COMPLETE')
+

 @mock_iam_deprecated()
 def test_get_credential_report():
@@ -552,6 +603,20 @@ def test_get_credential_report():
     report.should.match(r'.*my-user.*')


+@mock_iam
+def test_boto3_get_credential_report():
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_user(UserName='my-user')
+    with assert_raises(ClientError):
+        conn.get_credential_report()
+    result = conn.generate_credential_report()
+    while result['State'] != 'COMPLETE':
+        result = conn.generate_credential_report()
+    result = conn.get_credential_report()
+    report = result['Content'].decode('utf-8')
+    report.should.match(r'.*my-user.*')
+
+
 @requires_boto_gte('2.39')
 @mock_iam_deprecated()
 def test_managed_policy():
@@ -695,17 +760,28 @@ def test_update_access_key():
     resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive')


+@mock_iam
+def test_get_access_key_last_used():
+    iam = boto3.resource('iam', region_name='us-east-1')
+    client = iam.meta.client
+    username = 'test-user'
+    iam.create_user(UserName=username)
+    with assert_raises(ClientError):
+        client.get_access_key_last_used(AccessKeyId='non-existent-key-id')
+    create_key_response = client.create_access_key(UserName=username)['AccessKey']
+    resp = client.get_access_key_last_used(AccessKeyId=create_key_response['AccessKeyId'])
+
+    datetime.strftime(resp["AccessKeyLastUsed"]["LastUsedDate"], "%Y-%m-%d").should.equal(datetime.strftime(
+        datetime.utcnow(),
+        "%Y-%m-%d"
+    ))
+    resp["UserName"].should.equal(create_key_response["UserName"])
+
+
 @mock_iam
 def test_get_account_authorization_details():
     import json
-    conn = boto3.client('iam', region_name='us-east-1')
-    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
-    conn.create_user(Path='/', UserName='testCloudAuxUser')
-    conn.create_group(Path='/', GroupName='testCloudAuxGroup')
-    conn.create_policy(
-        PolicyName='testCloudAuxPolicy',
-        Path='/',
-        PolicyDocument=json.dumps({
+    test_policy = json.dumps({
         "Version": "2012-10-17",
         "Statement": [
             {
@@ -714,47 +790,506 @@ def test_get_account_authorization_details():
                 "Effect": "Allow",
             }
         ]
-    }),
-        Description='Test CloudAux Policy'
+    })
+
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+    conn.create_user(Path='/', UserName='testUser')
+    conn.create_group(Path='/', GroupName='testGroup')
+    conn.create_policy(
+        PolicyName='testPolicy',
+        Path='/',
+        PolicyDocument=test_policy,
+        Description='Test Policy'
     )
+
+    # Attach things to the user and group:
+    conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy)
+    conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy)
+
+    conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+    conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+
+    conn.add_user_to_group(UserName='testUser', GroupName='testGroup')
+
+    # Add things to the role:
+    conn.create_instance_profile(InstanceProfileName='ipn')
+    conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role')
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': 'somevalue'
+        },
+        {
+            'Key': 'someotherkey',
+            'Value': 'someothervalue'
+        }
+    ])
+    conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy)
+    conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+
     result = conn.get_account_authorization_details(Filter=['Role'])
-    len(result['RoleDetailList']) == 1
+    assert len(result['RoleDetailList']) == 1
-    len(result['UserDetailList']) == 0
+    assert len(result['UserDetailList']) == 0
-    len(result['GroupDetailList']) == 0
+    assert len(result['GroupDetailList']) == 0
-    len(result['Policies']) == 0
+    assert len(result['Policies']) == 0
+    assert len(result['RoleDetailList'][0]['InstanceProfileList']) == 1
+    assert len(result['RoleDetailList'][0]['Tags']) == 2
+    assert len(result['RoleDetailList'][0]['RolePolicyList']) == 1
+    assert len(result['RoleDetailList'][0]['AttachedManagedPolicies']) == 1
+    assert result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy'
+    assert result['RoleDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \
+        'arn:aws:iam::123456789012:policy/testPolicy'

     result = conn.get_account_authorization_details(Filter=['User'])
-    len(result['RoleDetailList']) == 0
+    assert len(result['RoleDetailList']) == 0
-    len(result['UserDetailList']) == 1
+    assert len(result['UserDetailList']) == 1
-    len(result['GroupDetailList']) == 0
+    assert len(result['UserDetailList'][0]['GroupList']) == 1
-    len(result['Policies']) == 0
+    assert len(result['UserDetailList'][0]['AttachedManagedPolicies']) == 1
+    assert len(result['GroupDetailList']) == 0
+    assert len(result['Policies']) == 0
+    assert result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy'
+    assert result['UserDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \
+        'arn:aws:iam::123456789012:policy/testPolicy'

     result = conn.get_account_authorization_details(Filter=['Group'])
-    len(result['RoleDetailList']) == 0
+    assert len(result['RoleDetailList']) == 0
-    len(result['UserDetailList']) == 0
+    assert len(result['UserDetailList']) == 0
-    len(result['GroupDetailList']) == 1
+    assert len(result['GroupDetailList']) == 1
-    len(result['Policies']) == 0
+    assert len(result['GroupDetailList'][0]['GroupPolicyList']) == 1
+    assert len(result['GroupDetailList'][0]['AttachedManagedPolicies']) == 1
+    assert len(result['Policies']) == 0
+    assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyName'] == 'testPolicy'
+    assert result['GroupDetailList'][0]['AttachedManagedPolicies'][0]['PolicyArn'] == \
+        'arn:aws:iam::123456789012:policy/testPolicy'

     result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy'])
-    len(result['RoleDetailList']) == 0
+    assert len(result['RoleDetailList']) == 0
-    len(result['UserDetailList']) == 0
+    assert len(result['UserDetailList']) == 0
-    len(result['GroupDetailList']) == 0
+    assert len(result['GroupDetailList']) == 0
-    len(result['Policies']) == 1
+    assert len(result['Policies']) == 1
+    assert len(result['Policies'][0]['PolicyVersionList']) == 1

     # Check for greater than 1 since this should always be greater than one but might change.
     # See iam/aws_managed_policies.py
     result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy'])
-    len(result['RoleDetailList']) == 0
+    assert len(result['RoleDetailList']) == 0
-    len(result['UserDetailList']) == 0
+    assert len(result['UserDetailList']) == 0
-    len(result['GroupDetailList']) == 0
+    assert len(result['GroupDetailList']) == 0
-    len(result['Policies']) > 1
+    assert len(result['Policies']) > 1

     result = conn.get_account_authorization_details()
-    len(result['RoleDetailList']) == 1
+    assert len(result['RoleDetailList']) == 1
-    len(result['UserDetailList']) == 1
+    assert len(result['UserDetailList']) == 1
-    len(result['GroupDetailList']) == 1
+    assert len(result['GroupDetailList']) == 1
-    len(result['Policies']) > 1
+    assert len(result['Policies']) > 1
+
+
+@mock_iam
+def test_signing_certs():
+    client = boto3.client('iam', region_name='us-east-1')
+
+    # Create the IAM user first:
+    client.create_user(UserName='testing')
+
+    # Upload the cert:
+    resp = client.upload_signing_certificate(UserName='testing', CertificateBody=MOCK_CERT)['Certificate']
+    cert_id = resp['CertificateId']
+
+    assert resp['UserName'] == 'testing'
+    assert resp['Status'] == 'Active'
+    assert resp['CertificateBody'] == MOCK_CERT
+    assert resp['CertificateId']
+
+    # Upload a the cert with an invalid body:
+    with assert_raises(ClientError) as ce:
+        client.upload_signing_certificate(UserName='testing', CertificateBody='notacert')
+    assert ce.exception.response['Error']['Code'] == 'MalformedCertificate'
+
+    # Upload with an invalid user:
+    with assert_raises(ClientError):
+        client.upload_signing_certificate(UserName='notauser', CertificateBody=MOCK_CERT)
+
+    # Update:
+    client.update_signing_certificate(UserName='testing', CertificateId=cert_id, Status='Inactive')
+
+    with assert_raises(ClientError):
+        client.update_signing_certificate(UserName='notauser', CertificateId=cert_id, Status='Inactive')
+
+    with assert_raises(ClientError) as ce:
+        client.update_signing_certificate(UserName='testing', CertificateId='x' * 32, Status='Inactive')
+
+    assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
+        id='x' * 32)
+
+    # List the certs:
+    resp = client.list_signing_certificates(UserName='testing')['Certificates']
+    assert len(resp) == 1
+    assert resp[0]['CertificateBody'] == MOCK_CERT
+    assert resp[0]['Status'] == 'Inactive'  # Changed with the update call above.
+
+    with assert_raises(ClientError):
+        client.list_signing_certificates(UserName='notauser')
+
+    # Delete:
+    client.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
+
+    with assert_raises(ClientError):
+        client.delete_signing_certificate(UserName='notauser', CertificateId=cert_id)
+
+
+@mock_iam()
+def test_create_saml_provider():
+    conn = boto3.client('iam', region_name='us-east-1')
+    response = conn.create_saml_provider(
+        Name="TestSAMLProvider",
+        SAMLMetadataDocument='a' * 1024
+    )
+    response['SAMLProviderArn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider")
+
+
+@mock_iam()
+def test_get_saml_provider():
+    conn = boto3.client('iam', region_name='us-east-1')
+    saml_provider_create = conn.create_saml_provider(
+        Name="TestSAMLProvider",
+        SAMLMetadataDocument='a' * 1024
+    )
+    response = conn.get_saml_provider(
+        SAMLProviderArn=saml_provider_create['SAMLProviderArn']
+    )
+    response['SAMLMetadataDocument'].should.equal('a' * 1024)
+
+
+@mock_iam()
+def test_list_saml_providers():
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_saml_provider(
+        Name="TestSAMLProvider",
+        SAMLMetadataDocument='a' * 1024
+    )
+    response = conn.list_saml_providers()
+    response['SAMLProviderList'][0]['Arn'].should.equal("arn:aws:iam::123456789012:saml-provider/TestSAMLProvider")
+
+
+@mock_iam()
+def test_delete_saml_provider():
+    conn = boto3.client('iam', region_name='us-east-1')
+    saml_provider_create = conn.create_saml_provider(
+        Name="TestSAMLProvider",
+        SAMLMetadataDocument='a' * 1024
+    )
+    response = conn.list_saml_providers()
+    len(response['SAMLProviderList']).should.equal(1)
+    conn.delete_saml_provider(
+        SAMLProviderArn=saml_provider_create['SAMLProviderArn']
+    )
+    response = conn.list_saml_providers()
+    len(response['SAMLProviderList']).should.equal(0)
+    conn.create_user(UserName='testing')
+
+    cert_id = '123456789012345678901234'
+    with assert_raises(ClientError) as ce:
+        conn.delete_signing_certificate(UserName='testing', CertificateId=cert_id)
+
+    assert ce.exception.response['Error']['Message'] == 'The Certificate with id {id} cannot be found.'.format(
+        id=cert_id)
+
+    # Verify that it's not in the list:
+    resp = conn.list_signing_certificates(UserName='testing')
+    assert not resp['Certificates']
+
+
+@mock_iam()
+def test_tag_role():
+    """Tests both the tag_role and get_role_tags capability"""
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}")
+
+    # Get without tags:
+    role = conn.get_role(RoleName='my-role')['Role']
+    assert not role.get('Tags')
+
+    # With proper tag values:
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': 'somevalue'
+        },
+        {
+            'Key': 'someotherkey',
+            'Value': 'someothervalue'
+        }
+    ])
+
+    # Get role:
+    role = conn.get_role(RoleName='my-role')['Role']
+    assert len(role['Tags']) == 2
+    assert role['Tags'][0]['Key'] == 'somekey'
+    assert role['Tags'][0]['Value'] == 'somevalue'
+    assert role['Tags'][1]['Key'] == 'someotherkey'
+    assert role['Tags'][1]['Value'] == 'someothervalue'
+
+    # Same -- but for list_role_tags:
+    tags = conn.list_role_tags(RoleName='my-role')
+    assert len(tags['Tags']) == 2
+    assert role['Tags'][0]['Key'] == 'somekey'
+    assert role['Tags'][0]['Value'] == 'somevalue'
+    assert role['Tags'][1]['Key'] == 'someotherkey'
+    assert role['Tags'][1]['Value'] == 'someothervalue'
+    assert not tags['IsTruncated']
+    assert not tags.get('Marker')
+
+    # Test pagination:
+    tags = conn.list_role_tags(RoleName='my-role', MaxItems=1)
+    assert len(tags['Tags']) == 1
+    assert tags['IsTruncated']
+    assert tags['Tags'][0]['Key'] == 'somekey'
+    assert tags['Tags'][0]['Value'] == 'somevalue'
+    assert tags['Marker'] == '1'
+
+    tags = conn.list_role_tags(RoleName='my-role', Marker=tags['Marker'])
+    assert len(tags['Tags']) == 1
+    assert tags['Tags'][0]['Key'] == 'someotherkey'
+    assert tags['Tags'][0]['Value'] == 'someothervalue'
+    assert not tags['IsTruncated']
+    assert not tags.get('Marker')
+
+    # Test updating an existing tag:
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': 'somenewvalue'
+        }
+    ])
+    tags = conn.list_role_tags(RoleName='my-role')
+    assert len(tags['Tags']) == 2
+    assert tags['Tags'][0]['Key'] == 'somekey'
+    assert tags['Tags'][0]['Value'] == 'somenewvalue'
+
+    # Empty is good:
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': ''
+        }
+    ])
+    tags = conn.list_role_tags(RoleName='my-role')
+    assert len(tags['Tags']) == 2
+    assert tags['Tags'][0]['Key'] == 'somekey'
+    assert tags['Tags'][0]['Value'] == ''
+
+    # Test creating tags with invalid values:
+    # With more than 50 tags:
+    with assert_raises(ClientError) as ce:
+        too_many_tags = list(map(lambda x: {'Key': str(x), 'Value': str(x)}, range(0, 51)))
+        conn.tag_role(RoleName='my-role', Tags=too_many_tags)
+    assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \
+        in ce.exception.response['Error']['Message']
+
+    # With a duplicate tag:
+    with assert_raises(ClientError) as ce:
+        conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': ''}, {'Key': '0', 'Value': ''}])
+    assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \
+        in ce.exception.response['Error']['Message']
+
+    # Duplicate tag with different casing:
+    with assert_raises(ClientError) as ce:
+        conn.tag_role(RoleName='my-role', Tags=[{'Key': 'a', 'Value': ''}, {'Key': 'A', 'Value': ''}])
+    assert 'Duplicate tag keys found. Please note that Tag keys are case insensitive.' \
+        in ce.exception.response['Error']['Message']
+
+    # With a really big key:
+    with assert_raises(ClientError) as ce:
+        conn.tag_role(RoleName='my-role', Tags=[{'Key': '0' * 129, 'Value': ''}])
+    assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message']
+
+    # With a really big value:
+    with assert_raises(ClientError) as ce:
+        conn.tag_role(RoleName='my-role', Tags=[{'Key': '0', 'Value': '0' * 257}])
+    assert 'Member must have length less than or equal to 256.' in ce.exception.response['Error']['Message']
+
+    # With an invalid character:
+    with assert_raises(ClientError) as ce:
+        conn.tag_role(RoleName='my-role', Tags=[{'Key': 'NOWAY!', 'Value': ''}])
+    assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \
+        in ce.exception.response['Error']['Message']
+
+    # With a role that doesn't exist:
+    with assert_raises(ClientError):
+        conn.tag_role(RoleName='notarole', Tags=[{'Key': 'some', 'Value': 'value'}])
+
+
+@mock_iam
+def test_untag_role():
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="{}")
+
+    # With proper tag values:
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': 'somevalue'
+        },
+        {
+            'Key': 'someotherkey',
+            'Value': 'someothervalue'
+        }
+    ])
+
+    # Remove them:
+    conn.untag_role(RoleName='my-role', TagKeys=['somekey'])
+    tags = conn.list_role_tags(RoleName='my-role')
+    assert len(tags['Tags']) == 1
+    assert tags['Tags'][0]['Key'] == 'someotherkey'
+    assert tags['Tags'][0]['Value'] == 'someothervalue'
+
+    # And again:
+    conn.untag_role(RoleName='my-role', TagKeys=['someotherkey'])
+    tags = conn.list_role_tags(RoleName='my-role')
+    assert not tags['Tags']
+
+    # Test removing tags with invalid values:
+    # With more than 50 tags:
+    with assert_raises(ClientError) as ce:
+        conn.untag_role(RoleName='my-role', TagKeys=[str(x) for x in range(0, 51)])
+    assert 'failed to satisfy constraint: Member must have length less than or equal to 50.' \
+        in ce.exception.response['Error']['Message']
+    assert 'tagKeys' in ce.exception.response['Error']['Message']
+
+    # With a really big key:
+    with assert_raises(ClientError) as ce:
+        conn.untag_role(RoleName='my-role', TagKeys=['0' * 129])
+    assert 'Member must have length less than or equal to 128.' in ce.exception.response['Error']['Message']
+    assert 'tagKeys' in ce.exception.response['Error']['Message']
+
+    # With an invalid character:
+    with assert_raises(ClientError) as ce:
+        conn.untag_role(RoleName='my-role', TagKeys=['NOWAY!'])
+    assert 'Member must satisfy regular expression pattern: [\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]+' \
+        in ce.exception.response['Error']['Message']
+    assert 'tagKeys' in ce.exception.response['Error']['Message']
+
+    # With a role that doesn't exist:
+    with assert_raises(ClientError):
+        conn.untag_role(RoleName='notarole', TagKeys=['somevalue'])
+
+
+@mock_iam()
+def test_update_role_description():
+    conn = boto3.client('iam', region_name='us-east-1')
+
+    with assert_raises(ClientError):
+        conn.delete_role(RoleName="my-role")
+
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+    response = conn.update_role_description(RoleName="my-role", Description="test")
+
+    assert response['Role']['RoleName'] == 'my-role'
+
+
+@mock_iam()
+def test_update_role():
+    conn = boto3.client('iam', region_name='us-east-1')
+
+    with assert_raises(ClientError):
+        conn.delete_role(RoleName="my-role")
+
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+    response = conn.update_role_description(RoleName="my-role", Description="test")
+    assert response['Role']['RoleName'] == 'my-role'
+
+
+@mock_iam()
+def test_update_role():
+    conn = boto3.client('iam', region_name='us-east-1')
+
+    with assert_raises(ClientError):
+        conn.delete_role(RoleName="my-role")
+
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+    response = conn.update_role(RoleName="my-role", Description="test")
+    assert len(response.keys()) == 1
+
+
+@mock_iam()
+def test_list_entities_for_policy():
+    import json
+    test_policy = json.dumps({
+        "Version": "2012-10-17",
+        "Statement": [
+            {
+                "Action": "s3:ListBucket",
+                "Resource": "*",
+                "Effect": "Allow",
+            }
+        ]
+    })
+
+    conn = boto3.client('iam', region_name='us-east-1')
+    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+    conn.create_user(Path='/', UserName='testUser')
+    conn.create_group(Path='/', GroupName='testGroup')
+    conn.create_policy(
+        PolicyName='testPolicy',
+        Path='/',
+        PolicyDocument=test_policy,
+        Description='Test Policy'
+    )
+
+    # Attach things to the user and group:
+    conn.put_user_policy(UserName='testUser', PolicyName='testPolicy', PolicyDocument=test_policy)
+    conn.put_group_policy(GroupName='testGroup', PolicyName='testPolicy', PolicyDocument=test_policy)
+
+    conn.attach_user_policy(UserName='testUser', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+    conn.attach_group_policy(GroupName='testGroup', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+
+    conn.add_user_to_group(UserName='testUser', GroupName='testGroup')
+
+    # Add things to the role:
+    conn.create_instance_profile(InstanceProfileName='ipn')
+    conn.add_role_to_instance_profile(InstanceProfileName='ipn', RoleName='my-role')
+    conn.tag_role(RoleName='my-role', Tags=[
+        {
+            'Key': 'somekey',
+            'Value': 'somevalue'
+        },
+        {
+            'Key': 'someotherkey',
+            'Value': 'someothervalue'
+        }
+    ])
+    conn.put_role_policy(RoleName='my-role', PolicyName='test-policy', PolicyDocument=test_policy)
+    conn.attach_role_policy(RoleName='my-role', PolicyArn='arn:aws:iam::123456789012:policy/testPolicy')
+
+    response = conn.list_entities_for_policy(
+        PolicyArn='arn:aws:iam::123456789012:policy/testPolicy',
+        EntityFilter='Role'
+    )
+    assert response['PolicyRoles'] == [{'RoleName': 'my-role'}]
+
+    response = conn.list_entities_for_policy(
+        PolicyArn='arn:aws:iam::123456789012:policy/testPolicy',
+        EntityFilter='User',
+    )
+    assert response['PolicyUsers'] == [{'UserName': 'testUser'}]
+
+    response = conn.list_entities_for_policy(
+        PolicyArn='arn:aws:iam::123456789012:policy/testPolicy',
+        EntityFilter='Group',
+    )
+    assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}]
+
+    response = conn.list_entities_for_policy(
+        PolicyArn='arn:aws:iam::123456789012:policy/testPolicy',
+        EntityFilter='LocalManagedPolicy',
+    )
+    assert response['PolicyGroups'] == [{'GroupName': 'testGroup'}]
+    assert response['PolicyUsers'] == [{'UserName': 'testUser'}]
+    assert response['PolicyRoles'] == [{'RoleName': 'my-role'}]
+
+
+@mock_iam()
+def test_create_role_no_path():
+    conn = boto3.client('iam', region_name='us-east-1')
+    resp = conn.create_role(RoleName='my-role', AssumeRolePolicyDocument='some policy', Description='test')
+    resp.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-role')
@@ -5,6 +5,8 @@ import sure  # noqa
 import boto3

 from moto import mock_iot
+from botocore.exceptions import ClientError
+from nose.tools import assert_raises


 @mock_iot
@@ -261,6 +263,96 @@ def test_certs():
     res.should.have.key('certificates').which.should.have.length_of(0)


+@mock_iot
+def test_delete_policy_validation():
+    doc = """{
+    "Version": "2012-10-17",
+    "Statement":[
+        {
+            "Effect":"Allow",
+            "Action":[
+                "iot: *"
+            ],
+            "Resource":"*"
+        }
+    ]
+    }
+    """
+    client = boto3.client('iot', region_name='ap-northeast-1')
+    cert = client.create_keys_and_certificate(setAsActive=True)
+    cert_arn = cert['certificateArn']
+    policy_name = 'my-policy'
+    client.create_policy(policyName=policy_name, policyDocument=doc)
+    client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
+
+    with assert_raises(ClientError) as e:
+        client.delete_policy(policyName=policy_name)
+    e.exception.response['Error']['Message'].should.contain(
+        'The policy cannot be deleted as the policy is attached to one or more principals (name=%s)' % policy_name)
+    res = client.list_policies()
+    res.should.have.key('policies').which.should.have.length_of(1)
+
+    client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
+    client.delete_policy(policyName=policy_name)
+    res = client.list_policies()
+    res.should.have.key('policies').which.should.have.length_of(0)
+
+
+@mock_iot
+def test_delete_certificate_validation():
+    doc = """{
+    "Version": "2012-10-17",
+    "Statement":[
+        {
+            "Effect":"Allow",
+            "Action":[
+                "iot: *"
+            ],
+            "Resource":"*"
+        }
+    ]
+    }
+    """
+    client = boto3.client('iot', region_name='ap-northeast-1')
+    cert = client.create_keys_and_certificate(setAsActive=True)
+    cert_id = cert['certificateId']
+    cert_arn = cert['certificateArn']
+    policy_name = 'my-policy'
+    thing_name = 'thing-1'
+    client.create_policy(policyName=policy_name, policyDocument=doc)
+    client.attach_principal_policy(policyName=policy_name, principal=cert_arn)
+    client.create_thing(thingName=thing_name)
+    client.attach_thing_principal(thingName=thing_name, principal=cert_arn)
+
+    with assert_raises(ClientError) as e:
+        client.delete_certificate(certificateId=cert_id)
+    e.exception.response['Error']['Message'].should.contain(
+        'Certificate must be deactivated (not ACTIVE) before deletion.')
+    res = client.list_certificates()
+    res.should.have.key('certificates').which.should.have.length_of(1)
+
+    client.update_certificate(certificateId=cert_id, newStatus='REVOKED')
+    with assert_raises(ClientError) as e:
+        client.delete_certificate(certificateId=cert_id)
+    e.exception.response['Error']['Message'].should.contain(
+        'Things must be detached before deletion (arn: %s)' % cert_arn)
+    res = client.list_certificates()
+    res.should.have.key('certificates').which.should.have.length_of(1)
+
+    client.detach_thing_principal(thingName=thing_name, principal=cert_arn)
+    with assert_raises(ClientError) as e:
+        client.delete_certificate(certificateId=cert_id)
+    e.exception.response['Error']['Message'].should.contain(
+        'Certificate policies must be detached before deletion (arn: %s)' % cert_arn)
+    res = client.list_certificates()
+    res.should.have.key('certificates').which.should.have.length_of(1)
+
+    client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
+    client.delete_certificate(certificateId=cert_id)
+    res = client.list_certificates()
+    res.should.have.key('certificates').which.should.have.length_of(0)
+
+
 @mock_iot
 def test_certs_create_inactive():
     client = boto3.client('iot', region_name='ap-northeast-1')
@ -309,6 +401,47 @@ def test_policy():
|
|||||||
|
|
||||||
@mock_iot
|
@mock_iot
|
||||||
def test_principal_policy():
|
def test_principal_policy():
|
||||||
    client = boto3.client('iot', region_name='ap-northeast-1')
    policy_name = 'my-policy'
    doc = '{}'
    client.create_policy(policyName=policy_name, policyDocument=doc)
    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert['certificateArn']

    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_principal_policies(principal=cert_arn)
    res.should.have.key('policies').which.should.have.length_of(1)
    for policy in res['policies']:
        policy.should.have.key('policyName').which.should_not.be.none
        policy.should.have.key('policyArn').which.should_not.be.none

    # do nothing if the policy is already attached to the certificate
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_principal_policies(principal=cert_arn)
    res.should.have.key('policies').which.should.have.length_of(1)
    for policy in res['policies']:
        policy.should.have.key('policyName').which.should_not.be.none
        policy.should.have.key('policyArn').which.should_not.be.none

    res = client.list_policy_principals(policyName=policy_name)
    res.should.have.key('principals').which.should.have.length_of(1)
    for principal in res['principals']:
        principal.should_not.be.none

    client.detach_policy(policyName=policy_name, target=cert_arn)
    res = client.list_principal_policies(principal=cert_arn)
    res.should.have.key('policies').which.should.have.length_of(0)
    res = client.list_policy_principals(policyName=policy_name)
    res.should.have.key('principals').which.should.have.length_of(0)
    with assert_raises(ClientError) as e:
        client.detach_policy(policyName=policy_name, target=cert_arn)
    e.exception.response['Error']['Code'].should.equal('ResourceNotFoundException')


@mock_iot
def test_principal_policy_deprecated():
    client = boto3.client('iot', region_name='ap-northeast-1')
    policy_name = 'my-policy'
    doc = '{}'

@ -1,16 +1,18 @@
from __future__ import unicode_literals
import os, re

import boto3
import boto.kms
import botocore.exceptions
from boto.exception import JSONResponseError
from boto.kms.exceptions import AlreadyExistsException, NotFoundException

from moto.kms.exceptions import NotFoundException as MotoNotFoundException
import sure  # noqa
from moto import mock_kms, mock_kms_deprecated
from nose.tools import assert_raises
from freezegun import freeze_time
from datetime import datetime, timedelta
from datetime import datetime
from dateutil.tz import tzlocal
from dateutil.tz import tzutc


@mock_kms_deprecated
@ -128,7 +130,7 @@ def test_enable_key_rotation_via_arn():
def test_enable_key_rotation_with_missing_key():
    conn = boto.kms.connect_to_region("us-west-2")
    conn.enable_key_rotation.when.called_with(
        "not-a-key").should.throw(JSONResponseError)
        "not-a-key").should.throw(NotFoundException)


@mock_kms_deprecated
@ -143,7 +145,7 @@ def test_enable_key_rotation_with_alias_name_should_fail():
    alias_key['KeyMetadata']['Arn'].should.equal(key['KeyMetadata']['Arn'])

    conn.enable_key_rotation.when.called_with(
        'alias/my-alias').should.throw(JSONResponseError)
        'alias/my-alias').should.throw(NotFoundException)


@mock_kms_deprecated
@ -172,6 +174,7 @@ def test_encrypt():
    conn = boto.kms.connect_to_region("us-west-2")
    response = conn.encrypt('key_id', 'encryptme'.encode('utf-8'))
    response['CiphertextBlob'].should.equal(b'ZW5jcnlwdG1l')
    response['KeyId'].should.equal('key_id')


@mock_kms_deprecated
@ -185,14 +188,14 @@ def test_decrypt():
def test_disable_key_rotation_with_missing_key():
    conn = boto.kms.connect_to_region("us-west-2")
    conn.disable_key_rotation.when.called_with(
        "not-a-key").should.throw(JSONResponseError)
        "not-a-key").should.throw(NotFoundException)


@mock_kms_deprecated
def test_get_key_rotation_status_with_missing_key():
    conn = boto.kms.connect_to_region("us-west-2")
    conn.get_key_rotation_status.when.called_with(
        "not-a-key").should.throw(JSONResponseError)
        "not-a-key").should.throw(NotFoundException)


@mock_kms_deprecated
@ -278,7 +281,7 @@ def test_put_key_policy_via_alias_should_not_update():
        target_key_id=key['KeyMetadata']['KeyId'])

    conn.put_key_policy.when.called_with(
        'alias/my-key-alias', 'default', 'new policy').should.throw(JSONResponseError)
        'alias/my-key-alias', 'default', 'new policy').should.throw(NotFoundException)

    policy = conn.get_key_policy(key['KeyMetadata']['KeyId'], 'default')
    policy['Policy'].should.equal('my policy')
@ -598,9 +601,9 @@ def test__assert_valid_key_id():
    import uuid

    _assert_valid_key_id.when.called_with(
        "not-a-key").should.throw(JSONResponseError)
        "not-a-key").should.throw(MotoNotFoundException)
    _assert_valid_key_id.when.called_with(
        str(uuid.uuid4())).should_not.throw(JSONResponseError)
        str(uuid.uuid4())).should_not.throw(MotoNotFoundException)


@mock_kms_deprecated
@ -608,9 +611,9 @@ def test__assert_default_policy():
    from moto.kms.responses import _assert_default_policy

    _assert_default_policy.when.called_with(
        "not-default").should.throw(JSONResponseError)
        "not-default").should.throw(MotoNotFoundException)
    _assert_default_policy.when.called_with(
        "default").should_not.throw(JSONResponseError)
        "default").should_not.throw(MotoNotFoundException)


@mock_kms
@ -661,7 +664,7 @@ def test_schedule_key_deletion():
            KeyId=key['KeyMetadata']['KeyId']
        )
        assert response['KeyId'] == key['KeyMetadata']['KeyId']
        assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzlocal())
        assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzutc())
    else:
        # Can't manipulate time in server mode
        response = client.schedule_key_deletion(
@ -686,7 +689,7 @@ def test_schedule_key_deletion_custom():
            PendingWindowInDays=7
        )
        assert response['KeyId'] == key['KeyMetadata']['KeyId']
        assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzlocal())
        assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzutc())
    else:
        # Can't manipulate time in server mode
        response = client.schedule_key_deletion(
@ -717,3 +720,265 @@ def test_cancel_key_deletion():
    assert result["KeyMetadata"]["Enabled"] == False
    assert result["KeyMetadata"]["KeyState"] == 'Disabled'
    assert 'DeletionDate' not in result["KeyMetadata"]
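
The tzlocal() to tzutc() change in the two hunks above matters because the mocked DeletionDate is an aware UTC datetime; comparing against tzlocal() only passed on machines whose local zone happened to be UTC. A minimal sketch of the arithmetic the assertions rely on, using freeze_time and the 30-day default pending window from the tests (nothing here is moto internals):

    from datetime import datetime, timedelta
    from dateutil.tz import tzutc
    from freezegun import freeze_time

    with freeze_time('2015-01-01 12:00:00'):
        # freezegun pins "now" to 2015-01-01 12:00 UTC
        deletion_date = datetime.now(tzutc()) + timedelta(days=30)
    assert deletion_date == datetime(2015, 1, 31, 12, 0, tzinfo=tzutc())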
@mock_kms
def test_update_key_description():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='old_description')
    key_id = key['KeyMetadata']['KeyId']

    result = client.update_key_description(KeyId=key_id, Description='new_description')
    assert 'ResponseMetadata' in result


@mock_kms
def test_tag_resource():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='cancel-key-deletion')
    response = client.schedule_key_deletion(
        KeyId=key['KeyMetadata']['KeyId']
    )

    keyid = response['KeyId']
    response = client.tag_resource(
        KeyId=keyid,
        Tags=[
            {
                'TagKey': 'string',
                'TagValue': 'string'
            },
        ]
    )

    # Shouldn't return any data, just the ResponseMetadata header
    assert len(response.keys()) == 1


@mock_kms
def test_list_resource_tags():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='cancel-key-deletion')
    response = client.schedule_key_deletion(
        KeyId=key['KeyMetadata']['KeyId']
    )

    keyid = response['KeyId']
    response = client.tag_resource(
        KeyId=keyid,
        Tags=[
            {
                'TagKey': 'string',
                'TagValue': 'string'
            },
        ]
    )

    response = client.list_resource_tags(KeyId=keyid)
    assert response['Tags'][0]['TagKey'] == 'string'
    assert response['Tags'][0]['TagValue'] == 'string'


@mock_kms
def test_generate_data_key_sizes():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='generate-data-key-size')

    resp1 = client.generate_data_key(
        KeyId=key['KeyMetadata']['KeyId'],
        KeySpec='AES_256'
    )
    resp2 = client.generate_data_key(
        KeyId=key['KeyMetadata']['KeyId'],
        KeySpec='AES_128'
    )
    resp3 = client.generate_data_key(
        KeyId=key['KeyMetadata']['KeyId'],
        NumberOfBytes=64
    )

    assert len(resp1['Plaintext']) == 32
    assert len(resp2['Plaintext']) == 16
    assert len(resp3['Plaintext']) == 64
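
The three calls above exercise both sizing modes of generate_data_key: KeySpec names an AES key length, so the plaintext comes back as the key size in bytes, while NumberOfBytes requests an exact byte count. A one-line restatement of the mapping the assertions encode (illustrative only, not moto's code):

    # Plaintext bytes implied by KeySpec: AES_256 -> 256/8 = 32, AES_128 -> 128/8 = 16
    KEY_SPEC_BYTES = {'AES_256': 32, 'AES_128': 16}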
@mock_kms
def test_generate_data_key_decrypt():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='generate-data-key-decrypt')

    resp1 = client.generate_data_key(
        KeyId=key['KeyMetadata']['KeyId'],
        KeySpec='AES_256'
    )
    resp2 = client.decrypt(
        CiphertextBlob=resp1['CiphertextBlob']
    )

    assert resp1['Plaintext'] == resp2['Plaintext']


@mock_kms
def test_generate_data_key_invalid_size_params():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='generate-data-key-size')

    with assert_raises(botocore.exceptions.ClientError) as err:
        client.generate_data_key(
            KeyId=key['KeyMetadata']['KeyId'],
            KeySpec='AES_257'
        )

    with assert_raises(botocore.exceptions.ClientError) as err:
        client.generate_data_key(
            KeyId=key['KeyMetadata']['KeyId'],
            KeySpec='AES_128',
            NumberOfBytes=16
        )

    with assert_raises(botocore.exceptions.ClientError) as err:
        client.generate_data_key(
            KeyId=key['KeyMetadata']['KeyId'],
            NumberOfBytes=2048
        )

    with assert_raises(botocore.exceptions.ClientError) as err:
        client.generate_data_key(
            KeyId=key['KeyMetadata']['KeyId']
        )
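
Together the four error cases pin down the argument contract: KeySpec and NumberOfBytes are mutually exclusive, at least one must be given, KeySpec must be a supported AES size, and NumberOfBytes has an upper bound. A sketch of that rule (hypothetical, not moto's implementation; the 1 to 1024 bound is the documented KMS limit rather than something this test asserts directly):

    def validate_generate_data_key_args(key_spec=None, number_of_bytes=None):
        # Exactly one sizing parameter is allowed
        if key_spec is not None and number_of_bytes is not None:
            raise ValueError('Specify KeySpec or NumberOfBytes, not both')
        if key_spec is None and number_of_bytes is None:
            raise ValueError('Specify either KeySpec or NumberOfBytes')
        if key_spec is not None and key_spec not in ('AES_128', 'AES_256'):
            raise ValueError('KeySpec must be AES_128 or AES_256')
        if number_of_bytes is not None and not 1 <= number_of_bytes <= 1024:
            raise ValueError('NumberOfBytes must be between 1 and 1024')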
@mock_kms
def test_generate_data_key_invalid_key():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='generate-data-key-size')

    with assert_raises(client.exceptions.NotFoundException):
        client.generate_data_key(
            KeyId='alias/randomnonexistantkey',
            KeySpec='AES_256'
        )

    with assert_raises(client.exceptions.NotFoundException):
        client.generate_data_key(
            KeyId=key['KeyMetadata']['KeyId'] + '4',
            KeySpec='AES_256'
        )


@mock_kms
def test_generate_data_key_without_plaintext_decrypt():
    client = boto3.client('kms', region_name='us-east-1')
    key = client.create_key(Description='generate-data-key-decrypt')

    resp1 = client.generate_data_key_without_plaintext(
        KeyId=key['KeyMetadata']['KeyId'],
        KeySpec='AES_256'
    )

    assert 'Plaintext' not in resp1


@mock_kms
def test_enable_key_rotation_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.enable_key_rotation(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_disable_key_rotation_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.disable_key_rotation(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_enable_key_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.enable_key(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_disable_key_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.disable_key(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_cancel_key_deletion_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.cancel_key_deletion(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_schedule_key_deletion_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.schedule_key_deletion(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_get_key_rotation_status_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.get_key_rotation_status(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_get_key_policy_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.get_key_policy(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02',
            PolicyName='default'
        )


@mock_kms
def test_list_key_policies_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.list_key_policies(
            KeyId='12366f9b-1230-123d-123e-123e6ae60c02'
        )


@mock_kms
def test_put_key_policy_key_not_found():
    client = boto3.client('kms', region_name='us-east-1')

    with assert_raises(client.exceptions.NotFoundException):
        client.put_key_policy(
            KeyId='00000000-0000-0000-0000-000000000000',
            PolicyName='default',
            Policy='new policy'
        )
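
The ten near-identical cases above all feed a well-formed but unknown KeyId to a different operation and expect client.exceptions.NotFoundException. If they ever need extending, the same coverage could be expressed table-driven; a sketch under that assumption (the combined test name and structure are hypothetical, the committed tests spell each case out):

    @mock_kms
    def test_operations_with_missing_key_raise_not_found():
        client = boto3.client('kms', region_name='us-east-1')
        missing = '12366f9b-1230-123d-123e-123e6ae60c02'
        calls = [
            lambda: client.enable_key_rotation(KeyId=missing),
            lambda: client.disable_key_rotation(KeyId=missing),
            lambda: client.enable_key(KeyId=missing),
            lambda: client.disable_key(KeyId=missing),
            lambda: client.cancel_key_deletion(KeyId=missing),
            lambda: client.schedule_key_deletion(KeyId=missing),
            lambda: client.get_key_rotation_status(KeyId=missing),
            lambda: client.get_key_policy(KeyId=missing, PolicyName='default'),
            lambda: client.list_key_policies(KeyId=missing),
        ]
        for call in calls:
            with assert_raises(client.exceptions.NotFoundException):
                call()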
8
tests/test_packages/__init__.py
Normal file
@ -0,0 +1,8 @@
from __future__ import unicode_literals

import logging

# Disable extra logging for tests
logging.getLogger('boto').setLevel(logging.CRITICAL)
logging.getLogger('boto3').setLevel(logging.CRITICAL)
logging.getLogger('botocore').setLevel(logging.CRITICAL)
logging.getLogger('nose').setLevel(logging.CRITICAL)
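
Because these lines live in the test package's __init__.py, they run once at import time, before any test module loads, so boto's chatty output is raised to CRITICAL for every test in the package. When debugging a single run, one logger can be lowered again without touching these shared defaults; a sketch (the DEBUG choice is just an example):

    import logging
    # Temporarily restore verbose botocore logging for one debugging session
    logging.getLogger('botocore').setLevel(logging.DEBUG)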
Some files were not shown because too many files have changed in this diff.