diff --git a/.travis.yml b/.travis.yml
index 8d22aa98f..f1854db54 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -35,9 +35,10 @@ install:
if [ "$TEST_SERVER_MODE" = "true" ]; then
python wait_for.py
fi
+before_script:
+- if [[ $TRAVIS_PYTHON_VERSION == "3.7" ]]; then make lint; fi
script:
- make test-only
-- if [[ $TRAVIS_PYTHON_VERSION == "3.7" ]]; then make lint; fi
after_success:
- coveralls
before_deploy:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f42619b33..732dad23a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,189 @@
Moto Changelog
===================
+1.3.14
+-----
+
+ General Changes:
+ * Support for Python 3.8
+ * Linting: Black is now enforced.
+
+ New Services:
+ * Athena
+ * Config
+ * DataSync
+ * Step Functions
+
+ New methods:
+ * Athena:
+ * create_work_group()
+ * list_work_groups()
+ * API Gateway:
+ * delete_stage()
+ * update_api_key()
+ * CloudWatch Logs
+ * list_tags_log_group()
+ * tag_log_group()
+ * untag_log_group()
+ * Config
+ * batch_get_resource_config()
+ * delete_aggregation_authorization()
+ * delete_configuration_aggregator()
+ * describe_aggregation_authorizations()
+ * describe_configuration_aggregators()
+ * get_resource_config_history()
+ * list_aggregate_discovered_resources() (For S3)
+ * list_discovered_resources() (For S3)
+ * put_aggregation_authorization()
+ * put_configuration_aggregator()
+ * Cognito
+ * assume_role_with_web_identity()
+ * describe_identity_pool()
+ * get_open_id_token()
+ * update_user_pool_domain()
+ * DataSync:
+ * cancel_task_execution()
+ * create_location()
+ * create_task()
+ * start_task_execution()
+ * EC2:
+ * create_launch_template()
+ * create_launch_template_version()
+ * describe_launch_template_versions()
+ * describe_launch_templates()
+ * ECS
+ * decrypt()
+ * encrypt()
+ * generate_data_key_without_plaintext()
+ * generate_random()
+ * re_encrypt()
+ * Glue
+ * batch_get_partition()
+ * IAM
+ * create_open_id_connect_provider()
+ * create_virtual_mfa_device()
+ * delete_account_password_policy()
+ * delete_open_id_connect_provider()
+ * delete_policy()
+ * delete_virtual_mfa_device()
+ * get_account_password_policy()
+ * get_open_id_connect_provider()
+ * list_open_id_connect_providers()
+ * list_virtual_mfa_devices()
+ * update_account_password_policy()
+ * Lambda
+ * create_event_source_mapping()
+ * delete_event_source_mapping()
+ * get_event_source_mapping()
+ * list_event_source_mappings()
+ * update_configuration()
+ * update_event_source_mapping()
+ * update_function_code()
+ * KMS
+ * decrypt()
+ * encrypt()
+ * generate_data_key_without_plaintext()
+ * generate_random()
+ * re_encrypt()
+ * SES
+ * send_templated_email()
+ * SNS
+ * add_permission()
+ * list_tags_for_resource()
+ * remove_permission()
+ * tag_resource()
+ * untag_resource()
+ * SSM
+ * describe_parameters()
+ * get_parameter_history()
+ * Step Functions
+ * create_state_machine()
+ * delete_state_machine()
+ * describe_execution()
+ * describe_state_machine()
+ * describe_state_machine_for_execution()
+ * list_executions()
+ * list_state_machines()
+ * list_tags_for_resource()
+ * start_execution()
+ * stop_execution()
+ * SQS
+ * list_queue_tags()
+ * send_message_batch()
+
+ General updates:
+ * API Gateway:
+ * Now generates valid IDs
+ * API Keys, Usage Plans now support tags
+ * ACM:
+ * list_certificates() accepts the status parameter
+ * Batch:
+ * submit_job() can now be called with job name
+ * CloudWatch Events
+ * Multi-region support
+ * CloudWatch Logs
+ * get_log_events() now supports pagination
+ * Cognito:
+ * Now throws UsernameExistsException for known users
+ * DynamoDB
+ * update_item() now supports lists, the list_append-operator and removing nested items
+ * delete_item() now supports condition expressions
+ * get_item() now supports projection expression
+ * Enforces 400KB item size
+ * Validation on duplicate keys in batch_get_item()
+ * Validation on AttributeDefinitions on create_table()
+ * Validation on Query Key Expression
+ * Projection Expressions now support nested attributes
+ * EC2:
+ * Change DesiredCapacity behaviour for AutoScaling groups
+ * Extend list of supported EC2 ENI properties
+ * Create ASG from Instance now supported
+ * ASG attached to a terminated instance now recreates the instance if required
+ * Unify OwnerIDs
+ * ECS
+ * Task definition revision deregistration: remaining revisions now remain unchanged
+ * Fix created_at/updated_at format for deployments
+ * Support multiple regions
+ * ELB
+ * Return correct response when describing target health of stopped instances
+ * Target groups no longer show terminated instances
+ * 'fixed-response' now a supported action-type
+ * Now supports redirect: authenticate-cognito
+ * Kinesis FireHose
+ * Now supports ExtendedS3DestinationConfiguration
+ * KMS
+ * Now supports tags
+ * Organizations
+ * create_organization() now creates Master account
+ * Redshift
+ * Fix timezone problems when creating a cluster
+ * Support for enhanced_vpc_routing-parameter
+ * Route53
+ * Implemented UPSERT for change_resource_records
+ * S3:
+ * Support partNumber for head_object
+ * Support for INTELLIGENT_TIERING, GLACIER and DEEP_ARCHIVE
+ * Fix KeyCount attribute
+ * list_objects now supports pagination (next_marker)
+ * Support tagging for versioned objects
+ * STS
+ * Implement validation on policy length
+ * Lambda
+ * Support EventSourceMappings for SQS, DynamoDB
+ * get_function(), delete_function() now both support ARNs as parameters
+ * IAM
+ * Roles now support tags
+ * Policy Validation: SID can be empty
+ * Validate roles have no attachments when deleting
+ * SecretsManager
+ * Now supports binary secrets
+ * IOT
+ * update_thing_shadow validation
+ * delete_thing now also removes principals
+ * SQS
+ * Tags supported for create_queue()
+
+
1.3.7
-----
diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md
index 2e5f055b9..3d8338cfc 100644
--- a/IMPLEMENTATION_COVERAGE.md
+++ b/IMPLEMENTATION_COVERAGE.md
@@ -171,7 +171,7 @@
- [ ] update_webhook
## apigateway
-24% implemented
+25% implemented
- [ ] create_api_key
- [ ] create_authorizer
- [ ] create_base_path_mapping
@@ -204,7 +204,7 @@
- [ ] delete_request_validator
- [X] delete_resource
- [X] delete_rest_api
-- [ ] delete_stage
+- [X] delete_stage
- [X] delete_usage_plan
- [X] delete_usage_plan_key
- [ ] delete_vpc_link
@@ -687,12 +687,17 @@
## ce
0% implemented
- [ ] get_cost_and_usage
+- [ ] get_cost_and_usage_with_resources
- [ ] get_cost_forecast
- [ ] get_dimension_values
- [ ] get_reservation_coverage
- [ ] get_reservation_purchase_recommendation
- [ ] get_reservation_utilization
- [ ] get_rightsizing_recommendation
+- [ ] get_savings_plans_coverage
+- [ ] get_savings_plans_purchase_recommendation
+- [ ] get_savings_plans_utilization
+- [ ] get_savings_plans_utilization_details
- [ ] get_tags
- [ ] get_usage_forecast
@@ -701,6 +706,7 @@
- [ ] associate_phone_number_with_user
- [ ] associate_phone_numbers_with_voice_connector
- [ ] associate_phone_numbers_with_voice_connector_group
+- [ ] batch_create_room_membership
- [ ] batch_delete_phone_number
- [ ] batch_suspend_user
- [ ] batch_unsuspend_user
@@ -709,11 +715,15 @@
- [ ] create_account
- [ ] create_bot
- [ ] create_phone_number_order
+- [ ] create_room
+- [ ] create_room_membership
- [ ] create_voice_connector
- [ ] create_voice_connector_group
- [ ] delete_account
- [ ] delete_events_configuration
- [ ] delete_phone_number
+- [ ] delete_room
+- [ ] delete_room_membership
- [ ] delete_voice_connector
- [ ] delete_voice_connector_group
- [ ] delete_voice_connector_origination
@@ -731,6 +741,7 @@
- [ ] get_phone_number
- [ ] get_phone_number_order
- [ ] get_phone_number_settings
+- [ ] get_room
- [ ] get_user
- [ ] get_user_settings
- [ ] get_voice_connector
@@ -745,6 +756,8 @@
- [ ] list_bots
- [ ] list_phone_number_orders
- [ ] list_phone_numbers
+- [ ] list_room_memberships
+- [ ] list_rooms
- [ ] list_users
- [ ] list_voice_connector_groups
- [ ] list_voice_connector_termination_credentials
@@ -766,6 +779,8 @@
- [ ] update_global_settings
- [ ] update_phone_number
- [ ] update_phone_number_settings
+- [ ] update_room
+- [ ] update_room_membership
- [ ] update_user
- [ ] update_user_settings
- [ ] update_voice_connector
@@ -1003,6 +1018,7 @@
- [ ] delete_suggester
- [ ] describe_analysis_schemes
- [ ] describe_availability_options
+- [ ] describe_domain_endpoint_options
- [ ] describe_domains
- [ ] describe_expressions
- [ ] describe_index_fields
@@ -1012,6 +1028,7 @@
- [ ] index_documents
- [ ] list_domain_names
- [ ] update_availability_options
+- [ ] update_domain_endpoint_options
- [ ] update_scaling_parameters
- [ ] update_service_access_policies
@@ -1028,9 +1045,11 @@
- [ ] delete_trail
- [ ] describe_trails
- [ ] get_event_selectors
+- [ ] get_trail
- [ ] get_trail_status
- [ ] list_public_keys
- [ ] list_tags
+- [ ] list_trails
- [ ] lookup_events
- [ ] put_event_selectors
- [ ] remove_tags
@@ -1252,6 +1271,22 @@
- [ ] update_team_member
- [ ] update_user_profile
+## codestar-notifications
+0% implemented
+- [ ] create_notification_rule
+- [ ] delete_notification_rule
+- [ ] delete_target
+- [ ] describe_notification_rule
+- [ ] list_event_types
+- [ ] list_notification_rules
+- [ ] list_tags_for_resource
+- [ ] list_targets
+- [ ] subscribe
+- [ ] tag_resource
+- [ ] unsubscribe
+- [ ] untag_resource
+- [ ] update_notification_rule
+
## cognito-identity
28% implemented
- [X] create_identity_pool
@@ -1545,10 +1580,13 @@
- [ ] list_queues
- [ ] list_routing_profiles
- [ ] list_security_profiles
+- [ ] list_tags_for_resource
- [ ] list_user_hierarchy_groups
- [ ] list_users
- [ ] start_outbound_voice_contact
- [ ] stop_contact
+- [ ] tag_resource
+- [ ] untag_resource
- [ ] update_contact_attributes
- [ ] update_user_hierarchy
- [ ] update_user_identity_info
@@ -1563,6 +1601,31 @@
- [ ] modify_report_definition
- [ ] put_report_definition
+## dataexchange
+0% implemented
+- [ ] cancel_job
+- [ ] create_data_set
+- [ ] create_job
+- [ ] create_revision
+- [ ] delete_asset
+- [ ] delete_data_set
+- [ ] delete_revision
+- [ ] get_asset
+- [ ] get_data_set
+- [ ] get_job
+- [ ] get_revision
+- [ ] list_data_set_revisions
+- [ ] list_data_sets
+- [ ] list_jobs
+- [ ] list_revision_assets
+- [ ] list_tags_for_resource
+- [ ] start_job
+- [ ] tag_resource
+- [ ] untag_resource
+- [ ] update_asset
+- [ ] update_data_set
+- [ ] update_revision
+
## datapipeline
42% implemented
- [X] activate_pipeline
@@ -1586,17 +1649,17 @@
- [ ] validate_pipeline_definition
## datasync
-0% implemented
-- [ ] cancel_task_execution
+22% implemented
+- [X] cancel_task_execution
- [ ] create_agent
- [ ] create_location_efs
- [ ] create_location_nfs
- [ ] create_location_s3
- [ ] create_location_smb
-- [ ] create_task
+- [X] create_task
- [ ] delete_agent
-- [ ] delete_location
-- [ ] delete_task
+- [X] delete_location
+- [X] delete_task
- [ ] describe_agent
- [ ] describe_location_efs
- [ ] describe_location_nfs
@@ -1609,11 +1672,11 @@
- [ ] list_tags_for_resource
- [ ] list_task_executions
- [ ] list_tasks
-- [ ] start_task_execution
+- [X] start_task_execution
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_agent
-- [ ] update_task
+- [X] update_task
## dax
0% implemented
@@ -1799,6 +1862,9 @@
- [ ] delete_lifecycle_policy
- [ ] get_lifecycle_policies
- [ ] get_lifecycle_policy
+- [ ] list_tags_for_resource
+- [ ] tag_resource
+- [ ] untag_resource
- [ ] update_lifecycle_policy
## dms
@@ -2217,8 +2283,8 @@
- [X] describe_volumes
- [ ] describe_volumes_modifications
- [X] describe_vpc_attribute
-- [ ] describe_vpc_classic_link
-- [ ] describe_vpc_classic_link_dns_support
+- [X] describe_vpc_classic_link
+- [X] describe_vpc_classic_link_dns_support
- [ ] describe_vpc_endpoint_connection_notifications
- [ ] describe_vpc_endpoint_connections
- [ ] describe_vpc_endpoint_service_configurations
@@ -2237,8 +2303,8 @@
- [ ] disable_ebs_encryption_by_default
- [ ] disable_transit_gateway_route_table_propagation
- [ ] disable_vgw_route_propagation
-- [ ] disable_vpc_classic_link
-- [ ] disable_vpc_classic_link_dns_support
+- [X] disable_vpc_classic_link
+- [X] disable_vpc_classic_link_dns_support
- [X] disassociate_address
- [ ] disassociate_client_vpn_target_network
- [ ] disassociate_iam_instance_profile
@@ -2250,8 +2316,8 @@
- [ ] enable_transit_gateway_route_table_propagation
- [ ] enable_vgw_route_propagation
- [ ] enable_volume_io
-- [ ] enable_vpc_classic_link
-- [ ] enable_vpc_classic_link_dns_support
+- [X] enable_vpc_classic_link
+- [X] enable_vpc_classic_link_dns_support
- [ ] export_client_vpn_client_certificate_revocation_list
- [ ] export_client_vpn_client_configuration
- [ ] export_image
@@ -2461,16 +2527,22 @@
## eks
0% implemented
- [ ] create_cluster
+- [ ] create_nodegroup
- [ ] delete_cluster
+- [ ] delete_nodegroup
- [ ] describe_cluster
+- [ ] describe_nodegroup
- [ ] describe_update
- [ ] list_clusters
+- [ ] list_nodegroups
- [ ] list_tags_for_resource
- [ ] list_updates
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_cluster_config
- [ ] update_cluster_version
+- [ ] update_nodegroup_config
+- [ ] update_nodegroup_version
## elasticache
0% implemented
@@ -2718,12 +2790,12 @@
- [ ] upgrade_elasticsearch_domain
## events
-48% implemented
+58% implemented
- [ ] activate_event_source
-- [ ] create_event_bus
+- [X] create_event_bus
- [ ] create_partner_event_source
- [ ] deactivate_event_source
-- [ ] delete_event_bus
+- [X] delete_event_bus
- [ ] delete_partner_event_source
- [X] delete_rule
- [X] describe_event_bus
@@ -2732,7 +2804,7 @@
- [X] describe_rule
- [X] disable_rule
- [X] enable_rule
-- [ ] list_event_buses
+- [X] list_event_buses
- [ ] list_event_sources
- [ ] list_partner_event_source_accounts
- [ ] list_partner_event_sources
@@ -3217,6 +3289,7 @@
- [ ] create_filter
- [ ] create_ip_set
- [ ] create_members
+- [ ] create_publishing_destination
- [ ] create_sample_findings
- [ ] create_threat_intel_set
- [ ] decline_invitations
@@ -3225,7 +3298,9 @@
- [ ] delete_invitations
- [ ] delete_ip_set
- [ ] delete_members
+- [ ] delete_publishing_destination
- [ ] delete_threat_intel_set
+- [ ] describe_publishing_destination
- [ ] disassociate_from_master_account
- [ ] disassociate_members
- [ ] get_detector
@@ -3244,6 +3319,7 @@
- [ ] list_invitations
- [ ] list_ip_sets
- [ ] list_members
+- [ ] list_publishing_destinations
- [ ] list_tags_for_resource
- [ ] list_threat_intel_sets
- [ ] start_monitoring_members
@@ -3255,6 +3331,7 @@
- [ ] update_filter
- [ ] update_findings_feedback
- [ ] update_ip_set
+- [ ] update_publishing_destination
- [ ] update_threat_intel_set
## health
@@ -3267,7 +3344,7 @@
- [ ] describe_events
## iam
-62% implemented
+65% implemented
- [ ] add_client_id_to_open_id_connect_provider
- [X] add_role_to_instance_profile
- [X] add_user_to_group
@@ -3293,7 +3370,7 @@
- [X] delete_access_key
- [X] delete_account_alias
- [X] delete_account_password_policy
-- [ ] delete_group
+- [X] delete_group
- [ ] delete_group_policy
- [ ] delete_instance_profile
- [X] delete_login_profile
@@ -3323,7 +3400,7 @@
- [X] get_access_key_last_used
- [X] get_account_authorization_details
- [X] get_account_password_policy
-- [ ] get_account_summary
+- [X] get_account_summary
- [ ] get_context_keys_for_custom_policy
- [ ] get_context_keys_for_principal_policy
- [X] get_credential_report
@@ -3405,7 +3482,7 @@
- [X] update_signing_certificate
- [ ] update_ssh_public_key
- [X] update_user
-- [ ] upload_server_certificate
+- [X] upload_server_certificate
- [X] upload_signing_certificate
- [ ] upload_ssh_public_key
@@ -3459,7 +3536,7 @@
- [ ] update_assessment_target
## iot
-23% implemented
+22% implemented
- [ ] accept_certificate_transfer
- [ ] add_thing_to_billing_group
- [X] add_thing_to_thing_group
@@ -3544,11 +3621,13 @@
- [X] detach_thing_principal
- [ ] disable_topic_rule
- [ ] enable_topic_rule
+- [ ] get_cardinality
- [ ] get_effective_policies
- [ ] get_indexing_configuration
- [ ] get_job_document
- [ ] get_logging_options
- [ ] get_ota_update
+- [ ] get_percentiles
- [X] get_policy
- [ ] get_policy_version
- [ ] get_registration_code
@@ -3977,46 +4056,46 @@
- [ ] update_resource
## lambda
-0% implemented
+41% implemented
- [ ] add_layer_version_permission
- [ ] add_permission
- [ ] create_alias
-- [ ] create_event_source_mapping
-- [ ] create_function
+- [X] create_event_source_mapping
+- [X] create_function
- [ ] delete_alias
-- [ ] delete_event_source_mapping
-- [ ] delete_function
+- [X] delete_event_source_mapping
+- [X] delete_function
- [ ] delete_function_concurrency
- [ ] delete_layer_version
- [ ] get_account_settings
- [ ] get_alias
-- [ ] get_event_source_mapping
-- [ ] get_function
+- [X] get_event_source_mapping
+- [X] get_function
- [ ] get_function_configuration
- [ ] get_layer_version
- [ ] get_layer_version_by_arn
- [ ] get_layer_version_policy
- [ ] get_policy
-- [ ] invoke
+- [X] invoke
- [ ] invoke_async
- [ ] list_aliases
-- [ ] list_event_source_mappings
-- [ ] list_functions
+- [X] list_event_source_mappings
+- [X] list_functions
- [ ] list_layer_versions
- [ ] list_layers
-- [ ] list_tags
-- [ ] list_versions_by_function
+- [X] list_tags
+- [X] list_versions_by_function
- [ ] publish_layer_version
- [ ] publish_version
- [ ] put_function_concurrency
- [ ] remove_layer_version_permission
- [ ] remove_permission
-- [ ] tag_resource
-- [ ] untag_resource
+- [X] tag_resource
+- [X] untag_resource
- [ ] update_alias
-- [ ] update_event_source_mapping
-- [ ] update_function_code
-- [ ] update_function_configuration
+- [X] update_event_source_mapping
+- [X] update_function_code
+- [X] update_function_configuration
## lex-models
0% implemented
@@ -4295,6 +4374,15 @@
- [ ] reject_invitation
- [ ] vote_on_proposal
+## marketplace-catalog
+0% implemented
+- [ ] cancel_change_set
+- [ ] describe_change_set
+- [ ] describe_entity
+- [ ] list_change_sets
+- [ ] list_entities
+- [ ] start_change_set
+
## marketplace-entitlement
0% implemented
- [ ] get_entitlements
@@ -4723,7 +4811,7 @@
- [ ] update_server_engine_attributes
## organizations
-41% implemented
+43% implemented
- [ ] accept_handshake
- [X] attach_policy
- [ ] cancel_handshake
@@ -4737,7 +4825,7 @@
- [ ] delete_organizational_unit
- [ ] delete_policy
- [X] describe_account
-- [ ] describe_create_account_status
+- [X] describe_create_account_status
- [ ] describe_handshake
- [X] describe_organization
- [X] describe_organizational_unit
@@ -4773,6 +4861,7 @@
## personalize
0% implemented
+- [ ] create_batch_inference_job
- [ ] create_campaign
- [ ] create_dataset
- [ ] create_dataset_group
@@ -4788,6 +4877,7 @@
- [ ] delete_schema
- [ ] delete_solution
- [ ] describe_algorithm
+- [ ] describe_batch_inference_job
- [ ] describe_campaign
- [ ] describe_dataset
- [ ] describe_dataset_group
@@ -4799,6 +4889,7 @@
- [ ] describe_solution
- [ ] describe_solution_version
- [ ] get_solution_metrics
+- [ ] list_batch_inference_jobs
- [ ] list_campaigns
- [ ] list_dataset_groups
- [ ] list_dataset_import_jobs
@@ -4831,6 +4922,7 @@
- [ ] create_email_template
- [ ] create_export_job
- [ ] create_import_job
+- [ ] create_journey
- [ ] create_push_template
- [ ] create_segment
- [ ] create_sms_template
@@ -4847,6 +4939,7 @@
- [ ] delete_endpoint
- [ ] delete_event_stream
- [ ] delete_gcm_channel
+- [ ] delete_journey
- [ ] delete_push_template
- [ ] delete_segment
- [ ] delete_sms_channel
@@ -4879,6 +4972,10 @@
- [ ] get_gcm_channel
- [ ] get_import_job
- [ ] get_import_jobs
+- [ ] get_journey
+- [ ] get_journey_date_range_kpi
+- [ ] get_journey_execution_activity_metrics
+- [ ] get_journey_execution_metrics
- [ ] get_push_template
- [ ] get_segment
- [ ] get_segment_export_jobs
@@ -4890,6 +4987,7 @@
- [ ] get_sms_template
- [ ] get_user_endpoints
- [ ] get_voice_channel
+- [ ] list_journeys
- [ ] list_tags_for_resource
- [ ] list_templates
- [ ] phone_number_validate
@@ -4913,6 +5011,8 @@
- [ ] update_endpoint
- [ ] update_endpoints_batch
- [ ] update_gcm_channel
+- [ ] update_journey
+- [ ] update_journey_state
- [ ] update_push_template
- [ ] update_segment
- [ ] update_sms_channel
@@ -5661,6 +5761,17 @@
0% implemented
- [ ] invoke_endpoint
+## savingsplans
+0% implemented
+- [ ] create_savings_plan
+- [ ] describe_savings_plan_rates
+- [ ] describe_savings_plans
+- [ ] describe_savings_plans_offering_rates
+- [ ] describe_savings_plans_offerings
+- [ ] list_tags_for_resource
+- [ ] tag_resource
+- [ ] untag_resource
+
## sdb
0% implemented
- [ ] batch_delete_attributes
@@ -5954,6 +6065,51 @@
- [X] verify_email_address
- [X] verify_email_identity
+## sesv2
+0% implemented
+- [ ] create_configuration_set
+- [ ] create_configuration_set_event_destination
+- [ ] create_dedicated_ip_pool
+- [ ] create_deliverability_test_report
+- [ ] create_email_identity
+- [ ] delete_configuration_set
+- [ ] delete_configuration_set_event_destination
+- [ ] delete_dedicated_ip_pool
+- [ ] delete_email_identity
+- [ ] get_account
+- [ ] get_blacklist_reports
+- [ ] get_configuration_set
+- [ ] get_configuration_set_event_destinations
+- [ ] get_dedicated_ip
+- [ ] get_dedicated_ips
+- [ ] get_deliverability_dashboard_options
+- [ ] get_deliverability_test_report
+- [ ] get_domain_deliverability_campaign
+- [ ] get_domain_statistics_report
+- [ ] get_email_identity
+- [ ] list_configuration_sets
+- [ ] list_dedicated_ip_pools
+- [ ] list_deliverability_test_reports
+- [ ] list_domain_deliverability_campaigns
+- [ ] list_email_identities
+- [ ] list_tags_for_resource
+- [ ] put_account_dedicated_ip_warmup_attributes
+- [ ] put_account_sending_attributes
+- [ ] put_configuration_set_delivery_options
+- [ ] put_configuration_set_reputation_options
+- [ ] put_configuration_set_sending_options
+- [ ] put_configuration_set_tracking_options
+- [ ] put_dedicated_ip_in_pool
+- [ ] put_dedicated_ip_warmup_attributes
+- [ ] put_deliverability_dashboard_option
+- [ ] put_email_identity_dkim_attributes
+- [ ] put_email_identity_feedback_attributes
+- [ ] put_email_identity_mail_from_attributes
+- [ ] send_email
+- [ ] tag_resource
+- [ ] untag_resource
+- [ ] update_configuration_set_event_destination
+
## shield
0% implemented
- [ ] associate_drt_log_bucket
@@ -5984,8 +6140,11 @@
- [ ] list_signing_jobs
- [ ] list_signing_platforms
- [ ] list_signing_profiles
+- [ ] list_tags_for_resource
- [ ] put_signing_profile
- [ ] start_signing_job
+- [ ] tag_resource
+- [ ] untag_resource
## sms
0% implemented
@@ -6111,7 +6270,7 @@
- [X] untag_queue
## ssm
-10% implemented
+11% implemented
- [X] add_tags_to_resource
- [ ] cancel_command
- [ ] cancel_maintenance_window_execution
@@ -6184,7 +6343,7 @@
- [ ] get_ops_item
- [ ] get_ops_summary
- [X] get_parameter
-- [ ] get_parameter_history
+- [X] get_parameter_history
- [X] get_parameters
- [X] get_parameters_by_path
- [ ] get_patch_baseline
@@ -6233,6 +6392,19 @@
- [ ] update_patch_baseline
- [ ] update_service_setting
+## sso
+0% implemented
+- [ ] get_role_credentials
+- [ ] list_account_roles
+- [ ] list_accounts
+- [ ] logout
+
+## sso-oidc
+0% implemented
+- [ ] create_token
+- [ ] register_client
+- [ ] start_device_authorization
+
## stepfunctions
36% implemented
- [ ] create_activity
@@ -6742,6 +6914,7 @@
- [ ] delete_ip_group
- [ ] delete_tags
- [ ] delete_workspace_image
+- [ ] deregister_workspace_directory
- [ ] describe_account
- [ ] describe_account_modifications
- [ ] describe_client_properties
@@ -6758,10 +6931,14 @@
- [ ] list_available_management_cidr_ranges
- [ ] modify_account
- [ ] modify_client_properties
+- [ ] modify_selfservice_permissions
+- [ ] modify_workspace_access_properties
+- [ ] modify_workspace_creation_properties
- [ ] modify_workspace_properties
- [ ] modify_workspace_state
- [ ] reboot_workspaces
- [ ] rebuild_workspaces
+- [ ] register_workspace_directory
- [ ] restore_workspace
- [ ] revoke_ip_rules
- [ ] start_workspaces
diff --git a/Makefile b/Makefile
index ca0286984..e84d036b7 100644
--- a/Makefile
+++ b/Makefile
@@ -31,7 +31,8 @@ aws_managed_policies:
scripts/update_managed_policies.py
upload_pypi_artifact:
- python setup.py sdist bdist_wheel upload
+ python setup.py sdist bdist_wheel
+ twine upload dist/*
push_dockerhub_image:
docker build -t motoserver/moto .
diff --git a/moto/__init__.py b/moto/__init__.py
index cbca726d0..767c0ee27 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -58,7 +58,7 @@ from .xray import XRaySegment, mock_xray, mock_xray_client # noqa
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = "moto"
-__version__ = "1.3.14.dev"
+__version__ = "1.3.15.dev"
try:
diff --git a/moto/awslambda/models.py b/moto/awslambda/models.py
index 48f52f4d9..f71eab304 100644
--- a/moto/awslambda/models.py
+++ b/moto/awslambda/models.py
@@ -53,8 +53,9 @@ try:
except ImportError:
from backports.tempfile import TemporaryDirectory
-
-_stderr_regex = re.compile(r"START|END|REPORT RequestId: .*")
+# The lambci container is returning a special escape character for the "RequestID" fields. Unicode 033:
+# _stderr_regex = re.compile(r"START|END|REPORT RequestId: .*")
+_stderr_regex = re.compile(r"\033\[\d+.*")
_orig_adapter_send = requests.adapters.HTTPAdapter.send
docker_3 = docker.__version__[0] >= "3"
@@ -450,7 +451,7 @@ class LambdaFunction(BaseModel):
if exit_code != 0:
raise Exception("lambda invoke failed output: {}".format(output))
- # strip out RequestId lines
+ # strip out RequestId lines (TODO: This will return an additional '\n' in the response)
output = os.linesep.join(
[
line
@@ -998,6 +999,32 @@ class LambdaBackend(BaseBackend):
def add_policy(self, function_name, policy):
self.get_function(function_name).policy = policy
+ def update_function_code(self, function_name, qualifier, body):
+ fn = self.get_function(function_name, qualifier)
+
+ if fn:
+ if body.get("Publish", False):
+ fn = self.publish_function(function_name)
+
+ config = fn.update_function_code(body)
+ return config
+ else:
+ return None
+
+ def update_function_configuration(self, function_name, qualifier, body):
+ fn = self.get_function(function_name, qualifier)
+
+ return fn.update_configuration(body) if fn else None
+
+ def invoke(self, function_name, qualifier, body, headers, response_headers):
+ fn = self.get_function(function_name, qualifier)
+ if fn:
+ payload = fn.invoke(body, headers, response_headers)
+ response_headers["Content-Length"] = str(len(payload))
+ return response_headers, payload
+ else:
+ return response_headers, None
+
def do_validate_s3():
return os.environ.get("VALIDATE_LAMBDA_S3", "") in ["", "1", "true"]
diff --git a/moto/awslambda/responses.py b/moto/awslambda/responses.py
index bef032143..46203c10d 100644
--- a/moto/awslambda/responses.py
+++ b/moto/awslambda/responses.py
@@ -168,10 +168,10 @@ class LambdaResponse(BaseResponse):
function_name = self.path.rsplit("/", 2)[-2]
qualifier = self._get_param("qualifier")
- fn = self.lambda_backend.get_function(function_name, qualifier)
- if fn:
- payload = fn.invoke(self.body, self.headers, response_headers)
- response_headers["Content-Length"] = str(len(payload))
+ response_header, payload = self.lambda_backend.invoke(
+ function_name, qualifier, self.body, self.headers, response_headers
+ )
+ if payload:
return 202, response_headers, payload
else:
return 404, response_headers, "{}"
@@ -321,26 +321,23 @@ class LambdaResponse(BaseResponse):
def _put_configuration(self, request):
function_name = self.path.rsplit("/", 2)[-2]
qualifier = self._get_param("Qualifier", None)
+ resp = self.lambda_backend.update_function_configuration(
+ function_name, qualifier, body=self.json_body
+ )
- fn = self.lambda_backend.get_function(function_name, qualifier)
-
- if fn:
- config = fn.update_configuration(self.json_body)
- return 200, {}, json.dumps(config)
+ if resp:
+ return 200, {}, json.dumps(resp)
else:
return 404, {}, "{}"
def _put_code(self):
function_name = self.path.rsplit("/", 2)[-2]
qualifier = self._get_param("Qualifier", None)
+ resp = self.lambda_backend.update_function_code(
+ function_name, qualifier, body=self.json_body
+ )
- fn = self.lambda_backend.get_function(function_name, qualifier)
-
- if fn:
- if self.json_body.get("Publish", False):
- fn = self.lambda_backend.publish_function(function_name)
-
- config = fn.update_function_code(self.json_body)
- return 200, {}, json.dumps(config)
+ if resp:
+ return 200, {}, json.dumps(resp)
else:
return 404, {}, "{}"
diff --git a/moto/batch/models.py b/moto/batch/models.py
index 5c7fb4739..ab52db54c 100644
--- a/moto/batch/models.py
+++ b/moto/batch/models.py
@@ -624,7 +624,7 @@ class BatchBackend(BaseBackend):
def get_job_definition(self, identifier):
"""
- Get job defintiion by name or ARN
+ Get job definition by name or ARN
:param identifier: Name or ARN
:type identifier: str
@@ -643,7 +643,7 @@ class BatchBackend(BaseBackend):
def get_job_definitions(self, identifier):
"""
- Get job defintiion by name or ARN
+ Get job definitions by name or ARN
:param identifier: Name or ARN
:type identifier: str
@@ -934,7 +934,7 @@ class BatchBackend(BaseBackend):
self.ecs_backend.delete_cluster(compute_env.ecs_name)
if compute_env.env_type == "MANAGED":
- # Delete compute envrionment
+ # Delete compute environment
instance_ids = [instance.id for instance in compute_env.instances]
self.ec2_backend.terminate_instances(instance_ids)
@@ -1195,7 +1195,7 @@ class BatchBackend(BaseBackend):
depends_on=None,
container_overrides=None,
):
- # TODO parameters, retries (which is a dict raw from request), job dependancies and container overrides are ignored for now
+ # TODO parameters, retries (which is a dict raw from request), job dependencies and container overrides are ignored for now
# Look for job definition
job_def = self.get_job_definition(job_def_id)
diff --git a/moto/datasync/models.py b/moto/datasync/models.py
index 42626cceb..17a2659fb 100644
--- a/moto/datasync/models.py
+++ b/moto/datasync/models.py
@@ -27,12 +27,14 @@ class Task(BaseModel):
name,
region_name,
arn_counter=0,
+ metadata=None,
):
self.source_location_arn = source_location_arn
self.destination_location_arn = destination_location_arn
+ self.name = name
+ self.metadata = metadata
# For simplicity Tasks are either available or running
self.status = "AVAILABLE"
- self.name = name
self.current_task_execution_arn = None
# Generate ARN
self.arn = "arn:aws:datasync:{0}:111222333444:task/task-{1}".format(
@@ -129,7 +131,27 @@ class DataSyncBackend(BaseBackend):
self.locations[location.arn] = location
return location.arn
- def create_task(self, source_location_arn, destination_location_arn, name):
+ def _get_location(self, location_arn, typ):
+ if location_arn not in self.locations:
+ raise InvalidRequestException(
+ "Location {0} is not found.".format(location_arn)
+ )
+ location = self.locations[location_arn]
+ if location.typ != typ:
+ raise InvalidRequestException(
+ "Invalid Location type: {0}".format(location.typ)
+ )
+ return location
+
+ def delete_location(self, location_arn):
+ if location_arn in self.locations:
+ del self.locations[location_arn]
+ else:
+ raise InvalidRequestException
+
+ def create_task(
+ self, source_location_arn, destination_location_arn, name, metadata=None
+ ):
if source_location_arn not in self.locations:
raise InvalidRequestException(
"Location {0} not found.".format(source_location_arn)
@@ -145,10 +167,33 @@ class DataSyncBackend(BaseBackend):
name,
region_name=self.region_name,
arn_counter=self.arn_counter,
+ metadata=metadata,
)
self.tasks[task.arn] = task
return task.arn
+ def _get_task(self, task_arn):
+ if task_arn in self.tasks:
+ return self.tasks[task_arn]
+ else:
+ raise InvalidRequestException
+
+ def update_task(self, task_arn, name, metadata):
+ if task_arn in self.tasks:
+ task = self.tasks[task_arn]
+ task.name = name
+ task.metadata = metadata
+ else:
+ raise InvalidRequestException(
+ "Sync task {0} is not found.".format(task_arn)
+ )
+
+ def delete_task(self, task_arn):
+ if task_arn in self.tasks:
+ del self.tasks[task_arn]
+ else:
+ raise InvalidRequestException
+
def start_task_execution(self, task_arn):
self.arn_counter = self.arn_counter + 1
if task_arn in self.tasks:
@@ -161,12 +206,19 @@ class DataSyncBackend(BaseBackend):
return task_execution.arn
raise InvalidRequestException("Invalid request.")
+ def _get_task_execution(self, task_execution_arn):
+ if task_execution_arn in self.task_executions:
+ return self.task_executions[task_execution_arn]
+ else:
+ raise InvalidRequestException
+
def cancel_task_execution(self, task_execution_arn):
if task_execution_arn in self.task_executions:
task_execution = self.task_executions[task_execution_arn]
task_execution.cancel()
task_arn = task_execution.task_arn
self.tasks[task_arn].current_task_execution_arn = None
+ self.tasks[task_arn].status = "AVAILABLE"
return
raise InvalidRequestException(
"Sync task {0} is not found.".format(task_execution_arn)
diff --git a/moto/datasync/responses.py b/moto/datasync/responses.py
index 30b906d44..23a480523 100644
--- a/moto/datasync/responses.py
+++ b/moto/datasync/responses.py
@@ -2,7 +2,6 @@ import json
from moto.core.responses import BaseResponse
-from .exceptions import InvalidRequestException
from .models import datasync_backends
@@ -18,17 +17,7 @@ class DataSyncResponse(BaseResponse):
return json.dumps({"Locations": locations})
def _get_location(self, location_arn, typ):
- location_arn = self._get_param("LocationArn")
- if location_arn not in self.datasync_backend.locations:
- raise InvalidRequestException(
- "Location {0} is not found.".format(location_arn)
- )
- location = self.datasync_backend.locations[location_arn]
- if location.typ != typ:
- raise InvalidRequestException(
- "Invalid Location type: {0}".format(location.typ)
- )
- return location
+ return self.datasync_backend._get_location(location_arn, typ)
def create_location_s3(self):
# s3://bucket_name/folder/
@@ -86,16 +75,40 @@ class DataSyncResponse(BaseResponse):
}
)
+ def delete_location(self):
+ location_arn = self._get_param("LocationArn")
+ self.datasync_backend.delete_location(location_arn)
+ return json.dumps({})
+
def create_task(self):
destination_location_arn = self._get_param("DestinationLocationArn")
source_location_arn = self._get_param("SourceLocationArn")
name = self._get_param("Name")
-
+ metadata = {
+ "CloudWatchLogGroupArn": self._get_param("CloudWatchLogGroupArn"),
+ "Options": self._get_param("Options"),
+ "Excludes": self._get_param("Excludes"),
+ "Tags": self._get_param("Tags"),
+ }
arn = self.datasync_backend.create_task(
- source_location_arn, destination_location_arn, name
+ source_location_arn, destination_location_arn, name, metadata=metadata
)
return json.dumps({"TaskArn": arn})
+ def update_task(self):
+ task_arn = self._get_param("TaskArn")
+ self.datasync_backend.update_task(
+ task_arn,
+ name=self._get_param("Name"),
+ metadata={
+ "CloudWatchLogGroupArn": self._get_param("CloudWatchLogGroupArn"),
+ "Options": self._get_param("Options"),
+ "Excludes": self._get_param("Excludes"),
+ "Tags": self._get_param("Tags"),
+ },
+ )
+ return json.dumps({})
+
def list_tasks(self):
tasks = list()
for arn, task in self.datasync_backend.tasks.items():
@@ -104,29 +117,32 @@ class DataSyncResponse(BaseResponse):
)
return json.dumps({"Tasks": tasks})
+ def delete_task(self):
+ task_arn = self._get_param("TaskArn")
+ self.datasync_backend.delete_task(task_arn)
+ return json.dumps({})
+
def describe_task(self):
task_arn = self._get_param("TaskArn")
- if task_arn in self.datasync_backend.tasks:
- task = self.datasync_backend.tasks[task_arn]
- return json.dumps(
- {
- "TaskArn": task.arn,
- "Name": task.name,
- "CurrentTaskExecutionArn": task.current_task_execution_arn,
- "Status": task.status,
- "SourceLocationArn": task.source_location_arn,
- "DestinationLocationArn": task.destination_location_arn,
- }
- )
- raise InvalidRequestException
+ task = self.datasync_backend._get_task(task_arn)
+ return json.dumps(
+ {
+ "TaskArn": task.arn,
+ "Status": task.status,
+ "Name": task.name,
+ "CurrentTaskExecutionArn": task.current_task_execution_arn,
+ "SourceLocationArn": task.source_location_arn,
+ "DestinationLocationArn": task.destination_location_arn,
+ "CloudWatchLogGroupArn": task.metadata["CloudWatchLogGroupArn"],
+ "Options": task.metadata["Options"],
+ "Excludes": task.metadata["Excludes"],
+ }
+ )
def start_task_execution(self):
task_arn = self._get_param("TaskArn")
- if task_arn in self.datasync_backend.tasks:
- arn = self.datasync_backend.start_task_execution(task_arn)
- if arn:
- return json.dumps({"TaskExecutionArn": arn})
- raise InvalidRequestException("Invalid request.")
+ arn = self.datasync_backend.start_task_execution(task_arn)
+ return json.dumps({"TaskExecutionArn": arn})
def cancel_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
@@ -135,21 +151,12 @@ class DataSyncResponse(BaseResponse):
def describe_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
-
- if task_execution_arn in self.datasync_backend.task_executions:
- task_execution = self.datasync_backend.task_executions[task_execution_arn]
- if task_execution:
- result = json.dumps(
- {
- "TaskExecutionArn": task_execution.arn,
- "Status": task_execution.status,
- }
- )
- if task_execution.status == "SUCCESS":
- self.datasync_backend.tasks[
- task_execution.task_arn
- ].status = "AVAILABLE"
- # Simulate task being executed
- task_execution.iterate_status()
- return result
- raise InvalidRequestException
+ task_execution = self.datasync_backend._get_task_execution(task_execution_arn)
+ result = json.dumps(
+ {"TaskExecutionArn": task_execution.arn, "Status": task_execution.status,}
+ )
+ if task_execution.status == "SUCCESS":
+ self.datasync_backend.tasks[task_execution.task_arn].status = "AVAILABLE"
+ # Simulate task being executed
+ task_execution.iterate_status()
+ return result
diff --git a/moto/dynamodb2/models.py b/moto/dynamodb2/models.py
index 8a061041e..6361f8961 100644
--- a/moto/dynamodb2/models.py
+++ b/moto/dynamodb2/models.py
@@ -77,6 +77,7 @@ class DynamoType(object):
attr, list_index = attribute_is_list(attr)
if not key:
# {'S': value} ==> {'S': new_value}
+ self.type = new_value.type
self.value = new_value.value
else:
if attr not in self.value: # nonexistingattribute
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index efbbeb6fe..afb23dc80 100644
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -214,6 +214,7 @@ class NetworkInterface(TaggedEC2Resource):
ec2_backend,
subnet,
private_ip_address,
+ private_ip_addresses=None,
device_index=0,
public_ip_auto_assign=True,
group_ids=None,
@@ -223,6 +224,7 @@ class NetworkInterface(TaggedEC2Resource):
self.id = random_eni_id()
self.device_index = device_index
self.private_ip_address = private_ip_address or random_private_ip()
+ self.private_ip_addresses = private_ip_addresses
self.subnet = subnet
self.instance = None
self.attachment_id = None
@@ -341,12 +343,19 @@ class NetworkInterfaceBackend(object):
super(NetworkInterfaceBackend, self).__init__()
def create_network_interface(
- self, subnet, private_ip_address, group_ids=None, description=None, **kwargs
+ self,
+ subnet,
+ private_ip_address,
+ private_ip_addresses=None,
+ group_ids=None,
+ description=None,
+ **kwargs
):
eni = NetworkInterface(
self,
subnet,
private_ip_address,
+ private_ip_addresses,
group_ids=group_ids,
description=description,
**kwargs
@@ -2435,6 +2444,7 @@ class VPC(TaggedEC2Resource):
self.instance_tenancy = instance_tenancy
self.is_default = "true" if is_default else "false"
self.enable_dns_support = "true"
+ self.classic_link_enabled = "false"
# This attribute is set to 'true' only for default VPCs
# or VPCs created using the wizard of the VPC console
self.enable_dns_hostnames = "true" if is_default else "false"
@@ -2531,6 +2541,32 @@ class VPC(TaggedEC2Resource):
self.cidr_block_association_set[association_id] = association_set
return association_set
+ def enable_vpc_classic_link(self):
+ # Check if current cidr block doesn't fall within the 10.0.0.0/8 block, excluding 10.0.0.0/16 and 10.1.0.0/16.
+ # Doesn't check any route tables; that could be added in the future.
+ # See https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html#classiclink-limitations
+ network_address = ipaddress.ip_network(self.cidr_block).network_address
+ if (
+ network_address not in ipaddress.ip_network("10.0.0.0/8")
+ or network_address in ipaddress.ip_network("10.0.0.0/16")
+ or network_address in ipaddress.ip_network("10.1.0.0/16")
+ ):
+ self.classic_link_enabled = "true"
+
+ return self.classic_link_enabled
+
+ def disable_vpc_classic_link(self):
+ self.classic_link_enabled = "false"
+ return self.classic_link_enabled
+
+ def enable_vpc_classic_link_dns_support(self):
+ self.classic_link_dns_supported = "true"
+ return self.classic_link_dns_supported
+
+ def disable_vpc_classic_link_dns_support(self):
+ self.classic_link_dns_supported = "false"
+ return self.classic_link_dns_supported
+
def disassociate_vpc_cidr_block(self, association_id):
if self.cidr_block == self.cidr_block_association_set.get(
association_id, {}
@@ -2661,6 +2697,22 @@ class VPCBackend(object):
else:
raise InvalidParameterValueError(attr_name)
+ def enable_vpc_classic_link(self, vpc_id):
+ vpc = self.get_vpc(vpc_id)
+ return vpc.enable_vpc_classic_link()
+
+ def disable_vpc_classic_link(self, vpc_id):
+ vpc = self.get_vpc(vpc_id)
+ return vpc.disable_vpc_classic_link()
+
+ def enable_vpc_classic_link_dns_support(self, vpc_id):
+ vpc = self.get_vpc(vpc_id)
+ return vpc.enable_vpc_classic_link_dns_support()
+
+ def disable_vpc_classic_link_dns_support(self, vpc_id):
+ vpc = self.get_vpc(vpc_id)
+ return vpc.disable_vpc_classic_link_dns_support()
+
def modify_vpc_attribute(self, vpc_id, attr_name, attr_value):
vpc = self.get_vpc(vpc_id)
if attr_name in ("enable_dns_support", "enable_dns_hostnames"):
@@ -2819,6 +2871,9 @@ class Subnet(TaggedEC2Resource):
self.vpc_id = vpc_id
self.cidr_block = cidr_block
self.cidr = ipaddress.IPv4Network(six.text_type(self.cidr_block), strict=False)
+ self._available_ip_addresses = (
+ ipaddress.IPv4Network(six.text_type(self.cidr_block)).num_addresses - 5
+ )
self._availability_zone = availability_zone
self.default_for_az = default_for_az
self.map_public_ip_on_launch = map_public_ip_on_launch
@@ -2854,6 +2909,21 @@ class Subnet(TaggedEC2Resource):
return subnet
+ @property
+ def available_ip_addresses(self):
+ enis = [
+ eni
+ for eni in self.ec2_backend.get_all_network_interfaces()
+ if eni.subnet.id == self.id
+ ]
+ addresses_taken = [
+ eni.private_ip_address for eni in enis if eni.private_ip_address
+ ]
+ for eni in enis:
+ if eni.private_ip_addresses:
+ addresses_taken.extend(eni.private_ip_addresses)
+ return str(self._available_ip_addresses - len(addresses_taken))
+
@property
def availability_zone(self):
return self._availability_zone.name
diff --git a/moto/ec2/responses/elastic_network_interfaces.py b/moto/ec2/responses/elastic_network_interfaces.py
index fa014b219..6761b294e 100644
--- a/moto/ec2/responses/elastic_network_interfaces.py
+++ b/moto/ec2/responses/elastic_network_interfaces.py
@@ -7,12 +7,13 @@ class ElasticNetworkInterfaces(BaseResponse):
def create_network_interface(self):
subnet_id = self._get_param("SubnetId")
private_ip_address = self._get_param("PrivateIpAddress")
+ private_ip_addresses = self._get_multi_param("PrivateIpAddresses")
groups = self._get_multi_param("SecurityGroupId")
subnet = self.ec2_backend.get_subnet(subnet_id)
description = self._get_param("Description")
if self.is_not_dryrun("CreateNetworkInterface"):
eni = self.ec2_backend.create_network_interface(
- subnet, private_ip_address, groups, description
+ subnet, private_ip_address, private_ip_addresses, groups, description
)
template = self.response_template(CREATE_NETWORK_INTERFACE_RESPONSE)
return template.render(eni=eni)
diff --git a/moto/ec2/responses/subnets.py b/moto/ec2/responses/subnets.py
index c42583f23..e11984e52 100644
--- a/moto/ec2/responses/subnets.py
+++ b/moto/ec2/responses/subnets.py
@@ -53,7 +53,7 @@ CREATE_SUBNET_RESPONSE = """
pending
{{ subnet.vpc_id }}
{{ subnet.cidr_block }}
- 251
+ {{ subnet.available_ip_addresses }}
{{ subnet._availability_zone.name }}
{{ subnet._availability_zone.zone_id }}
{{ subnet.default_for_az }}
@@ -81,7 +81,7 @@ DESCRIBE_SUBNETS_RESPONSE = """
available
{{ subnet.vpc_id }}
{{ subnet.cidr_block }}
- 251
+ {{ subnet.available_ip_addresses }}
{{ subnet._availability_zone.name }}
{{ subnet._availability_zone.zone_id }}
{{ subnet.default_for_az }}
diff --git a/moto/ec2/responses/vpcs.py b/moto/ec2/responses/vpcs.py
index 1773e4cc8..0fd198378 100644
--- a/moto/ec2/responses/vpcs.py
+++ b/moto/ec2/responses/vpcs.py
@@ -5,6 +5,13 @@ from moto.ec2.utils import filters_from_querystring
class VPCs(BaseResponse):
+ def _get_doc_date(self):
+ return (
+ "2013-10-15"
+ if "Boto/" in self.headers.get("user-agent", "")
+ else "2016-11-15"
+ )
+
def create_vpc(self):
cidr_block = self._get_param("CidrBlock")
instance_tenancy = self._get_param("InstanceTenancy", if_none="default")
@@ -16,11 +23,7 @@ class VPCs(BaseResponse):
instance_tenancy,
amazon_provided_ipv6_cidr_block=amazon_provided_ipv6_cidr_blocks,
)
- doc_date = (
- "2013-10-15"
- if "Boto/" in self.headers.get("user-agent", "")
- else "2016-11-15"
- )
+ doc_date = self._get_doc_date()
template = self.response_template(CREATE_VPC_RESPONSE)
return template.render(vpc=vpc, doc_date=doc_date)
@@ -50,6 +53,64 @@ class VPCs(BaseResponse):
template = self.response_template(DESCRIBE_VPC_ATTRIBUTE_RESPONSE)
return template.render(vpc_id=vpc_id, attribute=attribute, value=value)
+ def describe_vpc_classic_link_dns_support(self):
+ vpc_ids = self._get_multi_param("VpcIds")
+ filters = filters_from_querystring(self.querystring)
+ vpcs = self.ec2_backend.get_all_vpcs(vpc_ids=vpc_ids, filters=filters)
+ doc_date = self._get_doc_date()
+ template = self.response_template(
+ DESCRIBE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE
+ )
+ return template.render(vpcs=vpcs, doc_date=doc_date)
+
+ def enable_vpc_classic_link_dns_support(self):
+ vpc_id = self._get_param("VpcId")
+ classic_link_dns_supported = self.ec2_backend.enable_vpc_classic_link_dns_support(
+ vpc_id=vpc_id
+ )
+ doc_date = self._get_doc_date()
+ template = self.response_template(ENABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE)
+ return template.render(
+ classic_link_dns_supported=classic_link_dns_supported, doc_date=doc_date
+ )
+
+ def disable_vpc_classic_link_dns_support(self):
+ vpc_id = self._get_param("VpcId")
+ classic_link_dns_supported = self.ec2_backend.disable_vpc_classic_link_dns_support(
+ vpc_id=vpc_id
+ )
+ doc_date = self._get_doc_date()
+ template = self.response_template(DISABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE)
+ return template.render(
+ classic_link_dns_supported=classic_link_dns_supported, doc_date=doc_date
+ )
+
+ def describe_vpc_classic_link(self):
+ vpc_ids = self._get_multi_param("VpcId")
+ filters = filters_from_querystring(self.querystring)
+ vpcs = self.ec2_backend.get_all_vpcs(vpc_ids=vpc_ids, filters=filters)
+ doc_date = self._get_doc_date()
+ template = self.response_template(DESCRIBE_VPC_CLASSIC_LINK_RESPONSE)
+ return template.render(vpcs=vpcs, doc_date=doc_date)
+
+ def enable_vpc_classic_link(self):
+ vpc_id = self._get_param("VpcId")
+ classic_link_enabled = self.ec2_backend.enable_vpc_classic_link(vpc_id=vpc_id)
+ doc_date = self._get_doc_date()
+ template = self.response_template(ENABLE_VPC_CLASSIC_LINK_RESPONSE)
+ return template.render(
+ classic_link_enabled=classic_link_enabled, doc_date=doc_date
+ )
+
+ def disable_vpc_classic_link(self):
+ vpc_id = self._get_param("VpcId")
+ classic_link_enabled = self.ec2_backend.disable_vpc_classic_link(vpc_id=vpc_id)
+ doc_date = self._get_doc_date()
+ template = self.response_template(DISABLE_VPC_CLASSIC_LINK_RESPONSE)
+ return template.render(
+ classic_link_enabled=classic_link_enabled, doc_date=doc_date
+ )
+
def modify_vpc_attribute(self):
vpc_id = self._get_param("VpcId")
@@ -149,6 +210,56 @@ CREATE_VPC_RESPONSE = """
"""
+DESCRIBE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+
+ {% for vpc in vpcs %}
+ -
+ {{ vpc.id }}
+ {{ vpc.classic_link_dns_supported }}
+
+ {% endfor %}
+
+"""
+
+ENABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+ {{ classic_link_dns_supported }}
+"""
+
+DISABLE_VPC_CLASSIC_LINK_DNS_SUPPORT_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+ {{ classic_link_dns_supported }}
+"""
+
+DESCRIBE_VPC_CLASSIC_LINK_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+
+ {% for vpc in vpcs %}
+ -
+ {{ vpc.id }}
+ {{ vpc.classic_link_enabled }}
+
+ {% endfor %}
+
+"""
+
+ENABLE_VPC_CLASSIC_LINK_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+ {{ classic_link_enabled }}
+"""
+
+DISABLE_VPC_CLASSIC_LINK_RESPONSE = """
+
+ 7a62c442-3484-4f42-9342-6942EXAMPLE
+ {{ classic_link_enabled }}
+"""
+
DESCRIBE_VPCS_RESPONSE = """
7a62c442-3484-4f42-9342-6942EXAMPLE
diff --git a/moto/events/models.py b/moto/events/models.py
index e69062b2c..be4153b9f 100644
--- a/moto/events/models.py
+++ b/moto/events/models.py
@@ -5,6 +5,7 @@ import boto3
from moto.core.exceptions import JsonRESTError
from moto.core import BaseBackend, BaseModel
+from moto.sts.models import ACCOUNT_ID
class Rule(BaseModel):
@@ -54,6 +55,42 @@ class Rule(BaseModel):
self.targets.pop(index)
+class EventBus(BaseModel):
+ def __init__(self, region_name, name):
+ self.region = region_name
+ self.name = name
+
+ self._permissions = {}
+
+ @property
+ def arn(self):
+ return "arn:aws:events:{region}:{account_id}:event-bus/{name}".format(
+ region=self.region, account_id=ACCOUNT_ID, name=self.name
+ )
+
+ @property
+ def policy(self):
+ if not len(self._permissions):
+ return None
+
+ policy = {"Version": "2012-10-17", "Statement": []}
+
+ for sid, permission in self._permissions.items():
+ policy["Statement"].append(
+ {
+ "Sid": sid,
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": "arn:aws:iam::{}:root".format(permission["Principal"])
+ },
+ "Action": permission["Action"],
+ "Resource": self.arn,
+ }
+ )
+
+ return json.dumps(policy)
+
+
class EventsBackend(BaseBackend):
ACCOUNT_ID = re.compile(r"^(\d{1,12}|\*)$")
STATEMENT_ID = re.compile(r"^[a-zA-Z0-9-_]{1,64}$")
@@ -65,13 +102,19 @@ class EventsBackend(BaseBackend):
self.rules_order = []
self.next_tokens = {}
self.region_name = region_name
- self.permissions = {}
+ self.event_buses = {}
+ self.event_sources = {}
+
+ self._add_default_event_bus()
def reset(self):
region_name = self.region_name
self.__dict__ = {}
self.__init__(region_name)
+ def _add_default_event_bus(self):
+ self.event_buses["default"] = EventBus(self.region_name, "default")
+
def _get_rule_by_index(self, i):
return self.rules.get(self.rules_order[i])
@@ -221,9 +264,17 @@ class EventsBackend(BaseBackend):
def test_event_pattern(self):
raise NotImplementedError()
- def put_permission(self, action, principal, statement_id):
+ def put_permission(self, event_bus_name, action, principal, statement_id):
+ if not event_bus_name:
+ event_bus_name = "default"
+
+ event_bus = self.describe_event_bus(event_bus_name)
+
if action is None or action != "events:PutEvents":
- raise JsonRESTError("InvalidParameterValue", "Action must be PutEvents")
+ raise JsonRESTError(
+ "ValidationException",
+ "Provided value in parameter 'action' is not supported.",
+ )
if principal is None or self.ACCOUNT_ID.match(principal) is None:
raise JsonRESTError(
@@ -235,34 +286,81 @@ class EventsBackend(BaseBackend):
"InvalidParameterValue", "StatementId must match ^[a-zA-Z0-9-_]{1,64}$"
)
- self.permissions[statement_id] = {"action": action, "principal": principal}
+ event_bus._permissions[statement_id] = {
+ "Action": action,
+ "Principal": principal,
+ }
- def remove_permission(self, statement_id):
- try:
- del self.permissions[statement_id]
- except KeyError:
- raise JsonRESTError("ResourceNotFoundException", "StatementId not found")
+ def remove_permission(self, event_bus_name, statement_id):
+ if not event_bus_name:
+ event_bus_name = "default"
- def describe_event_bus(self):
- arn = "arn:aws:events:{0}:000000000000:event-bus/default".format(
- self.region_name
- )
- statements = []
- for statement_id, data in self.permissions.items():
- statements.append(
- {
- "Sid": statement_id,
- "Effect": "Allow",
- "Principal": {
- "AWS": "arn:aws:iam::{0}:root".format(data["principal"])
- },
- "Action": data["action"],
- "Resource": arn,
- }
+ event_bus = self.describe_event_bus(event_bus_name)
+
+ if not len(event_bus._permissions):
+ raise JsonRESTError(
+ "ResourceNotFoundException", "EventBus does not have a policy."
)
- policy = {"Version": "2012-10-17", "Statement": statements}
- policy_json = json.dumps(policy)
- return {"Policy": policy_json, "Name": "default", "Arn": arn}
+
+ if not event_bus._permissions.pop(statement_id, None):
+ raise JsonRESTError(
+ "ResourceNotFoundException",
+ "Statement with the provided id does not exist.",
+ )
+
+ def describe_event_bus(self, name):
+ if not name:
+ name = "default"
+
+ event_bus = self.event_buses.get(name)
+
+ if not event_bus:
+ raise JsonRESTError(
+ "ResourceNotFoundException",
+ "Event bus {} does not exist.".format(name),
+ )
+
+ return event_bus
+
+ def create_event_bus(self, name, event_source_name):
+ if name in self.event_buses:
+ raise JsonRESTError(
+ "ResourceAlreadyExistsException",
+ "Event bus {} already exists.".format(name),
+ )
+
+ if not event_source_name and "/" in name:
+ raise JsonRESTError(
+ "ValidationException", "Event bus name must not contain '/'."
+ )
+
+ if event_source_name and event_source_name not in self.event_sources:
+ raise JsonRESTError(
+ "ResourceNotFoundException",
+ "Event source {} does not exist.".format(event_source_name),
+ )
+
+ self.event_buses[name] = EventBus(self.region_name, name)
+
+ return self.event_buses[name]
+
+ def list_event_buses(self, name_prefix):
+ if name_prefix:
+ return [
+ event_bus
+ for event_bus in self.event_buses.values()
+ if event_bus.name.startswith(name_prefix)
+ ]
+
+ return list(self.event_buses.values())
+
+ def delete_event_bus(self, name):
+ if name == "default":
+ raise JsonRESTError(
+ "ValidationException", "Cannot delete event bus default."
+ )
+
+ self.event_buses.pop(name, None)
available_regions = boto3.session.Session().get_available_regions("events")
diff --git a/moto/events/responses.py b/moto/events/responses.py
index 39c5c75dc..98a33218a 100644
--- a/moto/events/responses.py
+++ b/moto/events/responses.py
@@ -238,20 +238,68 @@ class EventsHandler(BaseResponse):
pass
def put_permission(self):
+ event_bus_name = self._get_param("EventBusName")
action = self._get_param("Action")
principal = self._get_param("Principal")
statement_id = self._get_param("StatementId")
- self.events_backend.put_permission(action, principal, statement_id)
+ self.events_backend.put_permission(
+ event_bus_name, action, principal, statement_id
+ )
return ""
def remove_permission(self):
+ event_bus_name = self._get_param("EventBusName")
statement_id = self._get_param("StatementId")
- self.events_backend.remove_permission(statement_id)
+ self.events_backend.remove_permission(event_bus_name, statement_id)
return ""
def describe_event_bus(self):
- return json.dumps(self.events_backend.describe_event_bus())
+ name = self._get_param("Name")
+
+ event_bus = self.events_backend.describe_event_bus(name)
+ response = {
+ "Name": event_bus.name,
+ "Arn": event_bus.arn,
+ }
+
+ if event_bus.policy:
+ response["Policy"] = event_bus.policy
+
+ return json.dumps(response), self.response_headers
+
+ def create_event_bus(self):
+ name = self._get_param("Name")
+ event_source_name = self._get_param("EventSourceName")
+
+ event_bus = self.events_backend.create_event_bus(name, event_source_name)
+
+ return json.dumps({"EventBusArn": event_bus.arn}), self.response_headers
+
+ def list_event_buses(self):
+ name_prefix = self._get_param("NamePrefix")
+ # TODO: add 'NextToken' & 'Limit' parameters
+
+ response = []
+ for event_bus in self.events_backend.list_event_buses(name_prefix):
+ event_bus_response = {
+ "Name": event_bus.name,
+ "Arn": event_bus.arn,
+ }
+
+ if event_bus.policy:
+ event_bus_response["Policy"] = event_bus.policy
+
+ response.append(event_bus_response)
+
+ return json.dumps({"EventBuses": response}), self.response_headers
+
+ def delete_event_bus(self):
+ name = self._get_param("Name")
+
+ self.events_backend.delete_event_bus(name)
+
+ return "", self.response_headers
diff --git a/moto/iam/models.py b/moto/iam/models.py
index 564a07afb..c67d5b365 100644
--- a/moto/iam/models.py
+++ b/moto/iam/models.py
@@ -1,5 +1,6 @@
from __future__ import unicode_literals
import base64
+import hashlib
import os
import random
import string
@@ -475,6 +476,20 @@ class AccessKey(BaseModel):
raise UnformattedGetAttTemplateException()
+class SshPublicKey(BaseModel):
+ def __init__(self, user_name, ssh_public_key_body):
+ self.user_name = user_name
+ self.ssh_public_key_body = ssh_public_key_body
+ self.ssh_public_key_id = "APKA" + random_access_key()
+ self.fingerprint = hashlib.md5(ssh_public_key_body.encode()).hexdigest()
+ self.status = "Active"
+ self.upload_date = datetime.utcnow()
+
+ @property
+ def uploaded_iso_8601(self):
+ return iso_8601_datetime_without_milliseconds(self.upload_date)
+
+
class Group(BaseModel):
def __init__(self, name, path="/"):
self.name = name
@@ -536,6 +551,7 @@ class User(BaseModel):
self.policies = {}
self.managed_policies = {}
self.access_keys = []
+ self.ssh_public_keys = []
self.password = None
self.password_reset_required = False
self.signing_certificates = {}
@@ -605,6 +621,33 @@ class User(BaseModel):
"The Access Key with id {0} cannot be found".format(access_key_id)
)
+ def upload_ssh_public_key(self, ssh_public_key_body):
+ pubkey = SshPublicKey(self.name, ssh_public_key_body)
+ self.ssh_public_keys.append(pubkey)
+ return pubkey
+
+ def get_ssh_public_key(self, ssh_public_key_id):
+ for key in self.ssh_public_keys:
+ if key.ssh_public_key_id == ssh_public_key_id:
+ return key
+ else:
+ raise IAMNotFoundException(
+ "The SSH Public Key with id {0} cannot be found".format(
+ ssh_public_key_id
+ )
+ )
+
+ def get_all_ssh_public_keys(self):
+ return self.ssh_public_keys
+
+ def update_ssh_public_key(self, ssh_public_key_id, status):
+ key = self.get_ssh_public_key(ssh_public_key_id)
+ key.status = status
+
+ def delete_ssh_public_key(self, ssh_public_key_id):
+ key = self.get_ssh_public_key(ssh_public_key_id)
+ self.ssh_public_keys.remove(key)
+
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
@@ -719,7 +762,7 @@ class AccountPasswordPolicy(BaseModel):
def _format_error(self, key, value, constraint):
return 'Value "{value}" at "{key}" failed to satisfy constraint: {constraint}'.format(
- constraint=constraint, key=key, value=value,
+ constraint=constraint, key=key, value=value
)
def _raise_errors(self):
@@ -731,11 +774,139 @@ class AccountPasswordPolicy(BaseModel):
raise ValidationError(
"{count} validation error{plural} detected: {errors}".format(
- count=count, plural=plural, errors=errors,
+ count=count, plural=plural, errors=errors
)
)
+class AccountSummary(BaseModel):
+ def __init__(self, iam_backend):
+ self._iam_backend = iam_backend
+
+ self._group_policy_size_quota = 5120
+ self._instance_profiles_quota = 1000
+ self._groups_per_user_quota = 10
+ self._attached_policies_per_user_quota = 10
+ self._policies_quota = 1500
+ self._account_mfa_enabled = 0 # No way is known to activate MFA for the root account programmatically
+ self._access_keys_per_user_quota = 2
+ self._assume_role_policy_size_quota = 2048
+ self._policy_versions_in_use_quota = 10000
+ self._global_endpoint_token_version = (
+ 1 # TODO: implement set_security_token_service_preferences()
+ )
+ self._versions_per_policy_quota = 5
+ self._attached_policies_per_group_quota = 10
+ self._policy_size_quota = 6144
+ self._account_signing_certificates_present = 0 # valid values: 0 | 1
+ self._users_quota = 5000
+ self._server_certificates_quota = 20
+ self._user_policy_size_quota = 2048
+ self._roles_quota = 1000
+ self._signing_certificates_per_user_quota = 2
+ self._role_policy_size_quota = 10240
+ self._attached_policies_per_role_quota = 10
+ self._account_access_keys_present = 0 # valid values: 0 | 1
+ self._groups_quota = 300
+
+ @property
+ def summary_map(self):
+ return {
+ "GroupPolicySizeQuota": self._group_policy_size_quota,
+ "InstanceProfilesQuota": self._instance_profiles_quota,
+ "Policies": self._policies,
+ "GroupsPerUserQuota": self._groups_per_user_quota,
+ "InstanceProfiles": self._instance_profiles,
+ "AttachedPoliciesPerUserQuota": self._attached_policies_per_user_quota,
+ "Users": self._users,
+ "PoliciesQuota": self._policies_quota,
+ "Providers": self._providers,
+ "AccountMFAEnabled": self._account_mfa_enabled,
+ "AccessKeysPerUserQuota": self._access_keys_per_user_quota,
+ "AssumeRolePolicySizeQuota": self._assume_role_policy_size_quota,
+ "PolicyVersionsInUseQuota": self._policy_versions_in_use_quota,
+ "GlobalEndpointTokenVersion": self._global_endpoint_token_version,
+ "VersionsPerPolicyQuota": self._versions_per_policy_quota,
+ "AttachedPoliciesPerGroupQuota": self._attached_policies_per_group_quota,
+ "PolicySizeQuota": self._policy_size_quota,
+ "Groups": self._groups,
+ "AccountSigningCertificatesPresent": self._account_signing_certificates_present,
+ "UsersQuota": self._users_quota,
+ "ServerCertificatesQuota": self._server_certificates_quota,
+ "MFADevices": self._mfa_devices,
+ "UserPolicySizeQuota": self._user_policy_size_quota,
+ "PolicyVersionsInUse": self._policy_versions_in_use,
+ "ServerCertificates": self._server_certificates,
+ "Roles": self._roles,
+ "RolesQuota": self._roles_quota,
+ "SigningCertificatesPerUserQuota": self._signing_certificates_per_user_quota,
+ "MFADevicesInUse": self._mfa_devices_in_use,
+ "RolePolicySizeQuota": self._role_policy_size_quota,
+ "AttachedPoliciesPerRoleQuota": self._attached_policies_per_role_quota,
+ "AccountAccessKeysPresent": self._account_access_keys_present,
+ "GroupsQuota": self._groups_quota,
+ }
+
+ @property
+ def _groups(self):
+ return len(self._iam_backend.groups)
+
+ @property
+ def _instance_profiles(self):
+ return len(self._iam_backend.instance_profiles)
+
+ @property
+ def _mfa_devices(self):
+ # Unclear whether hardware MFA devices are also counted here
+ return len(self._iam_backend.virtual_mfa_devices)
+
+ @property
+ def _mfa_devices_in_use(self):
+ devices = 0
+
+ for user in self._iam_backend.users.values():
+ devices += len(user.mfa_devices)
+
+ return devices
+
+ @property
+ def _policies(self):
+ customer_policies = [
+ policy
+ for policy in self._iam_backend.managed_policies
+ if not policy.startswith("arn:aws:iam::aws:policy")
+ ]
+ return len(customer_policies)
+
+ @property
+ def _policy_versions_in_use(self):
+ attachments = 0
+
+ for policy in self._iam_backend.managed_policies.values():
+ attachments += policy.attachment_count
+
+ return attachments
+
+ @property
+ def _providers(self):
+ providers = len(self._iam_backend.saml_providers) + len(
+ self._iam_backend.open_id_providers
+ )
+ return providers
+
+ @property
+ def _roles(self):
+ return len(self._iam_backend.roles)
+
+ @property
+ def _server_certificates(self):
+ return len(self._iam_backend.certificates)
+
+ @property
+ def _users(self):
+ return len(self._iam_backend.users)
+
+
class IAMBackend(BaseBackend):
def __init__(self):
self.instance_profiles = {}
@@ -751,6 +922,7 @@ class IAMBackend(BaseBackend):
self.policy_arn_regex = re.compile(r"^arn:aws:iam::[0-9]*:policy/.*$")
self.virtual_mfa_devices = {}
self.account_password_policy = None
+ self.account_summary = AccountSummary(self)
super(IAMBackend, self).__init__()
def _init_managed_policies(self):
@@ -818,6 +990,12 @@ class IAMBackend(BaseBackend):
policy = ManagedPolicy(
policy_name, description=description, document=policy_document, path=path
)
+ if policy.arn in self.managed_policies:
+ raise EntityAlreadyExists(
+ "A policy called {0} already exists. Duplicate names are not allowed.".format(
+ policy_name
+ )
+ )
self.managed_policies[policy.arn] = policy
return policy
@@ -892,6 +1070,10 @@ class IAMBackend(BaseBackend):
permissions_boundary
),
)
+ if [role for role in self.get_roles() if role.name == role_name]:
+ raise EntityAlreadyExists(
+ "Role with name {0} already exists.".format(role_name)
+ )
clean_tags = self._tag_verification(tags)
role = Role(
@@ -1104,11 +1286,17 @@ class IAMBackend(BaseBackend):
raise IAMNotFoundException("Policy not found")
def create_instance_profile(self, name, path, role_ids):
+ if self.instance_profiles.get(name):
+ raise IAMConflictException(
+ code="EntityAlreadyExists",
+ message="Instance Profile {0} already exists.".format(name),
+ )
+
instance_profile_id = random_resource_id()
roles = [iam_backend.get_role_by_id(role_id) for role_id in role_ids]
instance_profile = InstanceProfile(instance_profile_id, name, path, roles)
- self.instance_profiles[instance_profile_id] = instance_profile
+ self.instance_profiles[name] = instance_profile
return instance_profile
def get_instance_profile(self, profile_name):
@@ -1146,7 +1334,7 @@ class IAMBackend(BaseBackend):
def get_all_server_certs(self, marker=None):
return self.certificates.values()
- def upload_server_cert(
+ def upload_server_certificate(
self, cert_name, cert_body, private_key, cert_chain=None, path=None
):
certificate_id = random_resource_id()
@@ -1221,6 +1409,14 @@ class IAMBackend(BaseBackend):
group = self.get_group(group_name)
return group.get_policy(policy_name)
+ def delete_group(self, group_name):
+ try:
+ del self.groups[group_name]
+ except KeyError:
+ raise IAMNotFoundException(
+ "The group with name {0} cannot be found.".format(group_name)
+ )
+
def create_user(self, user_name, path="/"):
if user_name in self.users:
raise IAMConflictException(
@@ -1431,6 +1627,26 @@ class IAMBackend(BaseBackend):
user = self.get_user(user_name)
user.delete_access_key(access_key_id)
+ def upload_ssh_public_key(self, user_name, ssh_public_key_body):
+ user = self.get_user(user_name)
+ return user.upload_ssh_public_key(ssh_public_key_body)
+
+ def get_ssh_public_key(self, user_name, ssh_public_key_id):
+ user = self.get_user(user_name)
+ return user.get_ssh_public_key(ssh_public_key_id)
+
+ def get_all_ssh_public_keys(self, user_name):
+ user = self.get_user(user_name)
+ return user.get_all_ssh_public_keys()
+
+ def update_ssh_public_key(self, user_name, ssh_public_key_id, status):
+ user = self.get_user(user_name)
+ return user.update_ssh_public_key(ssh_public_key_id, status)
+
+ def delete_ssh_public_key(self, user_name, ssh_public_key_id):
+ user = self.get_user(user_name)
+ return user.delete_ssh_public_key(ssh_public_key_id)
+
def enable_mfa_device(
self, user_name, serial_number, authentication_code_1, authentication_code_2
):
@@ -1717,5 +1933,8 @@ class IAMBackend(BaseBackend):
self.account_password_policy = None
+ def get_account_summary(self):
+ return self.account_summary
+
iam_backend = IAMBackend()
diff --git a/moto/iam/responses.py b/moto/iam/responses.py
index 08fe13dc5..ea14bef0f 100644
--- a/moto/iam/responses.py
+++ b/moto/iam/responses.py
@@ -351,7 +351,7 @@ class IamResponse(BaseResponse):
private_key = self._get_param("PrivateKey")
cert_chain = self._get_param("CertificateName")
- cert = iam_backend.upload_server_cert(
+ cert = iam_backend.upload_server_certificate(
cert_name, cert_body, private_key, cert_chain=cert_chain, path=path
)
template = self.response_template(UPLOAD_CERT_TEMPLATE)
@@ -428,6 +428,12 @@ class IamResponse(BaseResponse):
template = self.response_template(GET_GROUP_POLICY_TEMPLATE)
return template.render(name="GetGroupPolicyResponse", **policy_result)
+ def delete_group(self):
+ group_name = self._get_param("GroupName")
+ iam_backend.delete_group(group_name)
+ template = self.response_template(GENERIC_EMPTY_TEMPLATE)
+ return template.render(name="DeleteGroup")
+
def create_user(self):
user_name = self._get_param("UserName")
path = self._get_param("Path")
@@ -584,6 +590,46 @@ class IamResponse(BaseResponse):
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="DeleteAccessKey")
+ def upload_ssh_public_key(self):
+ user_name = self._get_param("UserName")
+ ssh_public_key_body = self._get_param("SSHPublicKeyBody")
+
+ key = iam_backend.upload_ssh_public_key(user_name, ssh_public_key_body)
+ template = self.response_template(UPLOAD_SSH_PUBLIC_KEY_TEMPLATE)
+ return template.render(key=key)
+
+ def get_ssh_public_key(self):
+ user_name = self._get_param("UserName")
+ ssh_public_key_id = self._get_param("SSHPublicKeyId")
+
+ key = iam_backend.get_ssh_public_key(user_name, ssh_public_key_id)
+ template = self.response_template(GET_SSH_PUBLIC_KEY_TEMPLATE)
+ return template.render(key=key)
+
+ def list_ssh_public_keys(self):
+ user_name = self._get_param("UserName")
+
+ keys = iam_backend.get_all_ssh_public_keys(user_name)
+ template = self.response_template(LIST_SSH_PUBLIC_KEYS_TEMPLATE)
+ return template.render(keys=keys)
+
+ def update_ssh_public_key(self):
+ user_name = self._get_param("UserName")
+ ssh_public_key_id = self._get_param("SSHPublicKeyId")
+ status = self._get_param("Status")
+
+ iam_backend.update_ssh_public_key(user_name, ssh_public_key_id, status)
+ template = self.response_template(UPDATE_SSH_PUBLIC_KEY_TEMPLATE)
+ return template.render()
+
+ def delete_ssh_public_key(self):
+ user_name = self._get_param("UserName")
+ ssh_public_key_id = self._get_param("SSHPublicKeyId")
+
+ iam_backend.delete_ssh_public_key(user_name, ssh_public_key_id)
+ template = self.response_template(DELETE_SSH_PUBLIC_KEY_TEMPLATE)
+ return template.render()
+
def deactivate_mfa_device(self):
user_name = self._get_param("UserName")
serial_number = self._get_param("SerialNumber")
@@ -882,6 +928,12 @@ class IamResponse(BaseResponse):
template = self.response_template(DELETE_ACCOUNT_PASSWORD_POLICY_TEMPLATE)
return template.render()
+ def get_account_summary(self):
+ account_summary = iam_backend.get_account_summary()
+
+ template = self.response_template(GET_ACCOUNT_SUMMARY_TEMPLATE)
+ return template.render(summary_map=account_summary.summary_map)
+
LIST_ENTITIES_FOR_POLICY_TEMPLATE = """
@@ -1684,6 +1736,73 @@ GET_ACCESS_KEY_LAST_USED_TEMPLATE = """
"""
+UPLOAD_SSH_PUBLIC_KEY_TEMPLATE = """
+
+
+ {{ key.user_name }}
+ {{ key.ssh_public_key_body }}
+ {{ key.ssh_public_key_id }}
+ {{ key.fingerprint }}
+ {{ key.status }}
+ {{ key.uploaded_iso_8601 }}
+
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+GET_SSH_PUBLIC_KEY_TEMPLATE = """
+
+
+ {{ key.user_name }}
+ {{ key.ssh_public_key_body }}
+ {{ key.ssh_public_key_id }}
+ {{ key.fingerprint }}
+ {{ key.status }}
+ {{ key.uploaded_iso_8601 }}
+
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+LIST_SSH_PUBLIC_KEYS_TEMPLATE = """
+
+
+ {% for key in keys %}
+
+ {{ key.user_name }}
+ {{ key.ssh_public_key_id }}
+ {{ key.status }}
+ {{ key.uploaded_iso_8601 }}
+
+ {% endfor %}
+
+ false
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+UPDATE_SSH_PUBLIC_KEY_TEMPLATE = """
+
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+DELETE_SSH_PUBLIC_KEY_TEMPLATE = """
+
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
CREDENTIAL_REPORT_GENERATING = """
@@ -2255,3 +2374,20 @@ DELETE_ACCOUNT_PASSWORD_POLICY_TEMPLATE = """7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
"""
+
+
+GET_ACCOUNT_SUMMARY_TEMPLATE = """
+
+
+ {% for key, value in summary_map.items() %}
+
+ {{ key }}
+ {{ value }}
+
+ {% endfor %}
+
+
+
+ 85cb9b90-ac28-11e4-a88d-97964EXAMPLE
+
+"""
diff --git a/moto/organizations/models.py b/moto/organizations/models.py
index 37f8bdeb9..d558616d2 100644
--- a/moto/organizations/models.py
+++ b/moto/organizations/models.py
@@ -269,10 +269,32 @@ class OrganizationsBackend(BaseBackend):
)
return account
+ def get_account_by_attr(self, attr, value):
+ account = next(
+ (
+ account
+ for account in self.accounts
+ if hasattr(account, attr) and getattr(account, attr) == value
+ ),
+ None,
+ )
+ if account is None:
+ raise RESTError(
+ "AccountNotFoundException",
+ "You specified an account that doesn't exist.",
+ )
+ return account
+
def describe_account(self, **kwargs):
account = self.get_account_by_id(kwargs["AccountId"])
return account.describe()
+ def describe_create_account_status(self, **kwargs):
+ account = self.get_account_by_attr(
+ "create_account_status_id", kwargs["CreateAccountRequestId"]
+ )
+ return account.create_account_status
+
def list_accounts(self):
return dict(
Accounts=[account.describe()["Account"] for account in self.accounts]
diff --git a/moto/organizations/responses.py b/moto/organizations/responses.py
index 673bf5adb..f9e0b2e04 100644
--- a/moto/organizations/responses.py
+++ b/moto/organizations/responses.py
@@ -65,6 +65,13 @@ class OrganizationsResponse(BaseResponse):
self.organizations_backend.describe_account(**self.request_params)
)
+ def describe_create_account_status(self):
+ return json.dumps(
+ self.organizations_backend.describe_create_account_status(
+ **self.request_params
+ )
+ )
+
def list_accounts(self):
return json.dumps(self.organizations_backend.list_accounts())
diff --git a/moto/packages/httpretty/core.py b/moto/packages/httpretty/core.py
index 0c9635e79..83bd19237 100644
--- a/moto/packages/httpretty/core.py
+++ b/moto/packages/httpretty/core.py
@@ -125,7 +125,7 @@ class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass):
internal `parse_request` method.
It also replaces the `rfile` and `wfile` attributes with StringIO
- instances so that we garantee that it won't make any I/O, neighter
+ instances so that we guarantee that it won't make any I/O, neither
for writing nor reading.
It has some convenience attributes:
diff --git a/moto/secretsmanager/exceptions.py b/moto/secretsmanager/exceptions.py
index 13f1f2766..bf717e20c 100644
--- a/moto/secretsmanager/exceptions.py
+++ b/moto/secretsmanager/exceptions.py
@@ -14,23 +14,21 @@ class ResourceNotFoundException(SecretsManagerClientError):
)
-# Using specialised exception due to the use of a non-ASCII character
class SecretNotFoundException(SecretsManagerClientError):
def __init__(self):
self.code = 404
super(SecretNotFoundException, self).__init__(
"ResourceNotFoundException",
- message="Secrets Manager can\u2019t find the specified secret.",
+ message="Secrets Manager can't find the specified secret.",
)
-# Using specialised exception due to the use of a non-ASCII character
class SecretHasNoValueException(SecretsManagerClientError):
def __init__(self, version_stage):
self.code = 404
super(SecretHasNoValueException, self).__init__(
"ResourceNotFoundException",
- message="Secrets Manager can\u2019t find the specified secret "
+ message="Secrets Manager can't find the specified secret "
"value for staging label: {}".format(version_stage),
)
diff --git a/moto/server.py b/moto/server.py
index bbc309fe2..92fe6f229 100644
--- a/moto/server.py
+++ b/moto/server.py
@@ -190,7 +190,7 @@ def create_backend_app(service):
index = 2
while endpoint in backend_app.view_functions:
# HACK: Sometimes we map the same view to multiple url_paths. Flask
- # requries us to have different names.
+ # requires us to have different names.
endpoint = original_endpoint + str(index)
index += 1
diff --git a/moto/ses/models.py b/moto/ses/models.py
index 353d6f4b7..eacdd8458 100644
--- a/moto/ses/models.py
+++ b/moto/ses/models.py
@@ -147,7 +147,7 @@ class SESBackend(BaseBackend):
def __type_of_message__(self, destinations):
"""Checks the destination for any special address that could indicate delivery,
- complaint or bounce like in SES simualtor"""
+ complaint or bounce like in SES simulator"""
alladdress = (
destinations.get("ToAddresses", [])
+ destinations.get("CcAddresses", [])
diff --git a/moto/sns/models.py b/moto/sns/models.py
index 949234c27..8b125358d 100644
--- a/moto/sns/models.py
+++ b/moto/sns/models.py
@@ -227,7 +227,7 @@ class Subscription(BaseModel):
return False
for attribute_values in attribute_values:
- # Even the offical documentation states a 5 digits of accuracy after the decimal point for numerics, in reality it is 6
+ # Even the official documentation states a 5 digits of accuracy after the decimal point for numerics, in reality it is 6
# https://docs.aws.amazon.com/sns/latest/dg/sns-subscription-filter-policies.html#subscription-filter-policy-constraints
if int(attribute_values * 1000000) == int(rule * 1000000):
return True
@@ -573,7 +573,7 @@ class SNSBackend(BaseBackend):
combinations = 1
for rules in six.itervalues(value):
combinations *= len(rules)
- # Even the offical documentation states the total combination of values must not exceed 100, in reality it is 150
+ # Even the official documentation states the total combination of values must not exceed 100, in reality it is 150
# https://docs.aws.amazon.com/sns/latest/dg/sns-subscription-filter-policies.html#subscription-filter-policy-constraints
if combinations > 150:
raise SNSInvalidParameter(
diff --git a/moto/sns/responses.py b/moto/sns/responses.py
index 23964c54a..c2eb3e7c3 100644
--- a/moto/sns/responses.py
+++ b/moto/sns/responses.py
@@ -77,7 +77,7 @@ class SNSResponse(BaseResponse):
transform_value = value["StringValue"]
elif "BinaryValue" in value:
transform_value = value["BinaryValue"]
- if not transform_value:
+ if transform_value == "":
raise InvalidParameterValue(
"The message attribute '{0}' must contain non-empty "
"message attribute value for message attribute "
diff --git a/moto/sqs/models.py b/moto/sqs/models.py
index e975c1bae..ca3d41f38 100644
--- a/moto/sqs/models.py
+++ b/moto/sqs/models.py
@@ -761,7 +761,7 @@ class SQSBackend(BaseBackend):
new_messages = []
for message in queue._messages:
- # Only delete message if it is not visible and the reciept_handle
+ # Only delete message if it is not visible and the receipt_handle
# matches.
if message.receipt_handle == receipt_handle:
queue.pending_messages.remove(message)
diff --git a/moto/swf/models/workflow_execution.py b/moto/swf/models/workflow_execution.py
index fca780a41..4d91b1f6f 100644
--- a/moto/swf/models/workflow_execution.py
+++ b/moto/swf/models/workflow_execution.py
@@ -430,7 +430,7 @@ class WorkflowExecution(BaseModel):
)
def fail(self, event_id, details=None, reason=None):
- # TODO: implement lenght constraints on details/reason
+ # TODO: implement length constraints on details/reason
self.execution_status = "CLOSED"
self.close_status = "FAILED"
self.close_timestamp = unix_time()
diff --git a/scripts/implementation_coverage.py b/scripts/implementation_coverage.py
index b3855e0b2..4552ec18e 100755
--- a/scripts/implementation_coverage.py
+++ b/scripts/implementation_coverage.py
@@ -7,16 +7,18 @@ import boto3
script_dir = os.path.dirname(os.path.abspath(__file__))
+alternative_service_names = {'lambda': 'awslambda'}
def get_moto_implementation(service_name):
- service_name_standardized = service_name.replace("-", "") if "-" in service_name else service_name
- if not hasattr(moto, service_name_standardized):
+ service_name = service_name.replace("-", "") if "-" in service_name else service_name
+ alt_service_name = alternative_service_names[service_name] if service_name in alternative_service_names else service_name
+ if not hasattr(moto, alt_service_name):
return None
- module = getattr(moto, service_name_standardized)
+ module = getattr(moto, alt_service_name)
if module is None:
return None
- mock = getattr(module, "mock_{}".format(service_name_standardized))
+ mock = getattr(module, "mock_{}".format(service_name))
if mock is None:
return None
backends = list(mock().backends.values())
diff --git a/setup.py b/setup.py
index a45b2b589..97a6341ff 100755
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,7 @@ install_requires = [
"werkzeug",
"PyYAML>=5.1",
"pytz",
- "python-dateutil<3.0.0,>=2.1",
+ "python-dateutil<2.8.1,>=2.1",
"python-jose<4.0.0",
"mock",
"docker>=2.5.1",
diff --git a/tests/test_awslambda/test_lambda.py b/tests/test_awslambda/test_lambda.py
index 2f104f049..ab8130a3a 100644
--- a/tests/test_awslambda/test_lambda.py
+++ b/tests/test_awslambda/test_lambda.py
@@ -164,7 +164,7 @@ if settings.TEST_SERVER_MODE:
conn = boto3.client("lambda", "us-west-2")
conn.create_function(
FunctionName="testFunction",
- Runtime="python2.7",
+ Runtime="python3.7",
Role=get_role_name(),
Handler="lambda_function.lambda_handler",
Code={"ZipFile": get_test_zip_file2()},
@@ -186,18 +186,20 @@ if settings.TEST_SERVER_MODE:
vol.id,
vol.state,
vol.size,
- json.dumps(in_data),
+ json.dumps(in_data).replace(
+ " ", ""
+ ), # Makes the tests pass as the result is missing the whitespace
)
log_result = base64.b64decode(result["LogResult"]).decode("utf-8")
- # fix for running under travis (TODO: investigate why it has an extra newline)
+ # The Docker lambda invocation will return an additional '\n', so need to replace it:
log_result = log_result.replace("\n\n", "\n")
log_result.should.equal(msg)
payload = result["Payload"].read().decode("utf-8")
- # fix for running under travis (TODO: investigate why it has an extra newline)
+ # The Docker lambda invocation will return an additional '\n', so need to replace it:
payload = payload.replace("\n\n", "\n")
payload.should.equal(msg)
diff --git a/tests/test_core/test_auth.py b/tests/test_core/test_auth.py
index 7dc632188..60d15cf51 100644
--- a/tests/test_core/test_auth.py
+++ b/tests/test_core/test_auth.py
@@ -11,6 +11,7 @@ from nose.tools import assert_raises
from moto import mock_iam, mock_ec2, mock_s3, mock_sts, mock_elbv2, mock_rds2
from moto.core import set_initial_no_auth_action_count
from moto.iam.models import ACCOUNT_ID
+from uuid import uuid4
@mock_iam
@@ -71,8 +72,10 @@ def create_user_with_access_key_and_multiple_policies(
def create_group_with_attached_policy_and_add_user(
- user_name, policy_document, group_name="test-group", policy_name="policy1"
+ user_name, policy_document, group_name="test-group", policy_name=None
):
+ if not policy_name:
+ policy_name = str(uuid4())
client = boto3.client("iam", region_name="us-east-1")
client.create_group(GroupName=group_name)
policy_arn = client.create_policy(
@@ -101,8 +104,10 @@ def create_group_with_multiple_policies_and_add_user(
attached_policy_document,
group_name="test-group",
inline_policy_name="policy1",
- attached_policy_name="policy1",
+ attached_policy_name=None,
):
+ if not attached_policy_name:
+ attached_policy_name = str(uuid4())
client = boto3.client("iam", region_name="us-east-1")
client.create_group(GroupName=group_name)
client.put_group_policy(
@@ -402,10 +407,10 @@ def test_s3_access_denied_with_denying_attached_group_policy():
"Statement": [{"Effect": "Deny", "Action": "s3:List*", "Resource": "*"}],
}
access_key = create_user_with_access_key_and_attached_policy(
- user_name, attached_policy_document
+ user_name, attached_policy_document, policy_name="policy1"
)
create_group_with_attached_policy_and_add_user(
- user_name, group_attached_policy_document
+ user_name, group_attached_policy_document, policy_name="policy2"
)
client = boto3.client(
"s3",
@@ -476,10 +481,16 @@ def test_access_denied_with_many_irrelevant_policies():
"Statement": [{"Effect": "Deny", "Action": "lambda:*", "Resource": "*"}],
}
access_key = create_user_with_access_key_and_multiple_policies(
- user_name, inline_policy_document, attached_policy_document
+ user_name,
+ inline_policy_document,
+ attached_policy_document,
+ attached_policy_name="policy1",
)
create_group_with_multiple_policies_and_add_user(
- user_name, group_inline_policy_document, group_attached_policy_document
+ user_name,
+ group_inline_policy_document,
+ group_attached_policy_document,
+ attached_policy_name="policy2",
)
client = boto3.client(
"ec2",
diff --git a/tests/test_datasync/test_datasync.py b/tests/test_datasync/test_datasync.py
index 825eb7fba..e3ea87675 100644
--- a/tests/test_datasync/test_datasync.py
+++ b/tests/test_datasync/test_datasync.py
@@ -127,6 +127,22 @@ def test_list_locations():
assert response["Locations"][2]["LocationUri"] == "s3://my_bucket/dir"
+@mock_datasync
+def test_delete_location():
+ client = boto3.client("datasync", region_name="us-east-1")
+ locations = create_locations(client, create_smb=True)
+ response = client.list_locations()
+ assert len(response["Locations"]) == 1
+ location_arn = locations["smb_arn"]
+
+ response = client.delete_location(LocationArn=location_arn)
+ response = client.list_locations()
+ assert len(response["Locations"]) == 0
+
+ with assert_raises(ClientError) as e:
+ response = client.delete_location(LocationArn=location_arn)
+
+
@mock_datasync
def test_create_task():
client = boto3.client("datasync", region_name="us-east-1")
@@ -208,6 +224,72 @@ def test_describe_task_not_exist():
client.describe_task(TaskArn="abc")
+@mock_datasync
+def test_update_task():
+ client = boto3.client("datasync", region_name="us-east-1")
+ locations = create_locations(client, create_s3=True, create_smb=True)
+
+ initial_name = "Initial_Name"
+ updated_name = "Updated_Name"
+ initial_options = {
+ "VerifyMode": "NONE",
+ "Atime": "BEST_EFFORT",
+ "Mtime": "PRESERVE",
+ }
+ updated_options = {
+ "VerifyMode": "POINT_IN_TIME_CONSISTENT",
+ "Atime": "BEST_EFFORT",
+ "Mtime": "PRESERVE",
+ }
+ response = client.create_task(
+ SourceLocationArn=locations["smb_arn"],
+ DestinationLocationArn=locations["s3_arn"],
+ Name=initial_name,
+ Options=initial_options,
+ )
+ task_arn = response["TaskArn"]
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["TaskArn"] == task_arn
+ assert response["Name"] == initial_name
+ assert response["Options"] == initial_options
+
+ response = client.update_task(
+ TaskArn=task_arn, Name=updated_name, Options=updated_options
+ )
+
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["TaskArn"] == task_arn
+ assert response["Name"] == updated_name
+ assert response["Options"] == updated_options
+
+ with assert_raises(ClientError) as e:
+ client.update_task(TaskArn="doesnt_exist")
+
+
+@mock_datasync
+def test_delete_task():
+ client = boto3.client("datasync", region_name="us-east-1")
+ locations = create_locations(client, create_s3=True, create_smb=True)
+
+ response = client.create_task(
+ SourceLocationArn=locations["smb_arn"],
+ DestinationLocationArn=locations["s3_arn"],
+ Name="task_name",
+ )
+
+ response = client.list_tasks()
+ assert len(response["Tasks"]) == 1
+ task_arn = response["Tasks"][0]["TaskArn"]
+ assert task_arn is not None
+
+ response = client.delete_task(TaskArn=task_arn)
+ response = client.list_tasks()
+ assert len(response["Tasks"]) == 0
+
+ with assert_raises(ClientError) as e:
+ response = client.delete_task(TaskArn=task_arn)
+
+
@mock_datasync
def test_start_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
@@ -261,6 +343,8 @@ def test_describe_task_execution():
Name="task_name",
)
task_arn = response["TaskArn"]
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "AVAILABLE"
response = client.start_task_execution(TaskArn=task_arn)
task_execution_arn = response["TaskExecutionArn"]
@@ -270,26 +354,38 @@ def test_describe_task_execution():
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "INITIALIZING"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "RUNNING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "PREPARING"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "RUNNING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "TRANSFERRING"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "RUNNING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "VERIFYING"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "RUNNING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "AVAILABLE"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
+ response = client.describe_task(TaskArn=task_arn)
+ assert response["Status"] == "AVAILABLE"
@mock_datasync
@@ -317,11 +413,13 @@ def test_cancel_task_execution():
response = client.describe_task(TaskArn=task_arn)
assert response["CurrentTaskExecutionArn"] == task_execution_arn
+ assert response["Status"] == "RUNNING"
response = client.cancel_task_execution(TaskExecutionArn=task_execution_arn)
response = client.describe_task(TaskArn=task_arn)
assert "CurrentTaskExecutionArn" not in response
+ assert response["Status"] == "AVAILABLE"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["Status"] == "ERROR"
diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py
index d492b0135..7f6963870 100644
--- a/tests/test_dynamodb2/test_dynamodb.py
+++ b/tests/test_dynamodb2/test_dynamodb.py
@@ -3319,3 +3319,66 @@ def _create_user_table():
TableName="users", Item={"username": {"S": "user3"}, "foo": {"S": "bar"}}
)
return client
+
+
+@mock_dynamodb2
+def test_update_item_if_original_value_is_none():
+ dynamo = boto3.resource("dynamodb", region_name="eu-central-1")
+ dynamo.create_table(
+ AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
+ TableName="origin-rbu-dev",
+ KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
+ ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
+ )
+ table = dynamo.Table("origin-rbu-dev")
+ table.put_item(Item={"job_id": "a", "job_name": None})
+ table.update_item(
+ Key={"job_id": "a"},
+ UpdateExpression="SET job_name = :output",
+ ExpressionAttributeValues={":output": "updated",},
+ )
+ table.scan()["Items"][0]["job_name"].should.equal("updated")
+
+
+@mock_dynamodb2
+def test_update_nested_item_if_original_value_is_none():
+ dynamo = boto3.resource("dynamodb", region_name="eu-central-1")
+ dynamo.create_table(
+ AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
+ TableName="origin-rbu-dev",
+ KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
+ ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
+ )
+ table = dynamo.Table("origin-rbu-dev")
+ table.put_item(Item={"job_id": "a", "job_details": {"job_name": None}})
+ table.update_item(
+ Key={"job_id": "a"},
+ UpdateExpression="SET job_details.job_name = :output",
+ ExpressionAttributeValues={":output": "updated",},
+ )
+ table.scan()["Items"][0]["job_details"]["job_name"].should.equal("updated")
+
+
+@mock_dynamodb2
+def test_allow_update_to_item_with_different_type():
+ dynamo = boto3.resource("dynamodb", region_name="eu-central-1")
+ dynamo.create_table(
+ AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
+ TableName="origin-rbu-dev",
+ KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
+ ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
+ )
+ table = dynamo.Table("origin-rbu-dev")
+ table.put_item(Item={"job_id": "a", "job_details": {"job_name": {"nested": "yes"}}})
+ table.put_item(Item={"job_id": "b", "job_details": {"job_name": {"nested": "yes"}}})
+ table.update_item(
+ Key={"job_id": "a"},
+ UpdateExpression="SET job_details.job_name = :output",
+ ExpressionAttributeValues={":output": "updated"},
+ )
+ table.get_item(Key={"job_id": "a"})["Item"]["job_details"][
+ "job_name"
+ ].should.be.equal("updated")
+ table.get_item(Key={"job_id": "b"})["Item"]["job_details"][
+ "job_name"
+ ].should.be.equal({"nested": "yes"})
diff --git a/tests/test_ec2/test_subnets.py b/tests/test_ec2/test_subnets.py
index f5f1af433..7bb57aab4 100644
--- a/tests/test_ec2/test_subnets.py
+++ b/tests/test_ec2/test_subnets.py
@@ -11,6 +11,7 @@ from boto.exception import EC2ResponseError
from botocore.exceptions import ParamValidationError, ClientError
import json
import sure # noqa
+import random
from moto import mock_cloudformation_deprecated, mock_ec2, mock_ec2_deprecated
@@ -474,3 +475,127 @@ def test_create_subnets_with_overlapping_cidr_blocks():
subnet_cidr_block
)
)
+
+
+@mock_ec2
+def test_available_ip_addresses_in_subnet():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+ client = boto3.client("ec2", region_name="us-west-1")
+
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+ cidr_range_addresses = [
+ ("10.0.0.0/16", 65531),
+ ("10.0.0.0/17", 32763),
+ ("10.0.0.0/18", 16379),
+ ("10.0.0.0/19", 8187),
+ ("10.0.0.0/20", 4091),
+ ("10.0.0.0/21", 2043),
+ ("10.0.0.0/22", 1019),
+ ("10.0.0.0/23", 507),
+ ("10.0.0.0/24", 251),
+ ("10.0.0.0/25", 123),
+ ("10.0.0.0/26", 59),
+ ("10.0.0.0/27", 27),
+ ("10.0.0.0/28", 11),
+ ]
+ for (cidr, expected_count) in cidr_range_addresses:
+ validate_subnet_details(client, vpc, cidr, expected_count)
+
+
+@mock_ec2
+def test_available_ip_addresses_in_subnet_with_enis():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+ client = boto3.client("ec2", region_name="us-west-1")
+
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+ # Verify behaviour for various CIDR ranges (...)
+ # Don't try to assign ENIs to /27 and /28, as there are not a lot of IP addresses to go around
+ cidr_range_addresses = [
+ ("10.0.0.0/16", 65531),
+ ("10.0.0.0/17", 32763),
+ ("10.0.0.0/18", 16379),
+ ("10.0.0.0/19", 8187),
+ ("10.0.0.0/20", 4091),
+ ("10.0.0.0/21", 2043),
+ ("10.0.0.0/22", 1019),
+ ("10.0.0.0/23", 507),
+ ("10.0.0.0/24", 251),
+ ("10.0.0.0/25", 123),
+ ("10.0.0.0/26", 59),
+ ]
+ for (cidr, expected_count) in cidr_range_addresses:
+ validate_subnet_details_after_creating_eni(client, vpc, cidr, expected_count)
+
+
+def validate_subnet_details(client, vpc, cidr, expected_ip_address_count):
+ subnet = client.create_subnet(
+ VpcId=vpc.id, CidrBlock=cidr, AvailabilityZone="us-west-1b"
+ )["Subnet"]
+ subnet["AvailableIpAddressCount"].should.equal(expected_ip_address_count)
+ client.delete_subnet(SubnetId=subnet["SubnetId"])
+
+
+def validate_subnet_details_after_creating_eni(
+ client, vpc, cidr, expected_ip_address_count
+):
+ subnet = client.create_subnet(
+ VpcId=vpc.id, CidrBlock=cidr, AvailabilityZone="us-west-1b"
+ )["Subnet"]
+ # Create a random number of Elastic Network Interfaces
+ nr_of_eni_to_create = random.randint(0, 5)
+ ip_addresses_assigned = 0
+ enis_created = []
+ for i in range(0, nr_of_eni_to_create):
+ # Create a random number of IP addresses per ENI
+ nr_of_ip_addresses = random.randint(1, 5)
+ if nr_of_ip_addresses == 1:
+ # Pick the first available IP address (First 4 are reserved by AWS)
+ private_address = "10.0.0." + str(ip_addresses_assigned + 4)
+ eni = client.create_network_interface(
+ SubnetId=subnet["SubnetId"], PrivateIpAddress=private_address
+ )["NetworkInterface"]
+ enis_created.append(eni)
+ ip_addresses_assigned = ip_addresses_assigned + 1
+ else:
+ # Assign a list of IP addresses
+ private_addresses = [
+ "10.0.0." + str(4 + ip_addresses_assigned + i)
+ for i in range(0, nr_of_ip_addresses)
+ ]
+ eni = client.create_network_interface(
+ SubnetId=subnet["SubnetId"],
+ PrivateIpAddresses=[
+ {"PrivateIpAddress": address} for address in private_addresses
+ ],
+ )["NetworkInterface"]
+ enis_created.append(eni)
+ ip_addresses_assigned = ip_addresses_assigned + nr_of_ip_addresses + 1 #
+ # Verify that the nr of available IP addresses takes these ENIs into account
+ updated_subnet = client.describe_subnets(SubnetIds=[subnet["SubnetId"]])["Subnets"][
+ 0
+ ]
+ private_addresses = [
+ eni["PrivateIpAddress"] for eni in enis_created if eni["PrivateIpAddress"]
+ ]
+ for eni in enis_created:
+ private_addresses.extend(
+ [address["PrivateIpAddress"] for address in eni["PrivateIpAddresses"]]
+ )
+ error_msg = (
+ "Nr of IP addresses for Subnet with CIDR {0} is incorrect. Expected: {1}, Actual: {2}. "
+ "Addresses: {3}"
+ )
+ with sure.ensure(
+ error_msg,
+ cidr,
+ str(expected_ip_address_count),
+ updated_subnet["AvailableIpAddressCount"],
+ str(private_addresses),
+ ):
+ updated_subnet["AvailableIpAddressCount"].should.equal(
+ expected_ip_address_count - ip_addresses_assigned
+ )
+ # Clean up, as we have to create a few more subnets that shouldn't interfere with each other
+ for eni in enis_created:
+ client.delete_network_interface(NetworkInterfaceId=eni["NetworkInterfaceId"])
+ client.delete_subnet(SubnetId=subnet["SubnetId"])
diff --git a/tests/test_ec2/test_vpcs.py b/tests/test_ec2/test_vpcs.py
index 0894a8b8e..1bc3ddd98 100644
--- a/tests/test_ec2/test_vpcs.py
+++ b/tests/test_ec2/test_vpcs.py
@@ -678,3 +678,150 @@ def test_create_vpc_with_invalid_cidr_range():
"An error occurred (InvalidVpc.Range) when calling the CreateVpc "
"operation: The CIDR '{}' is invalid.".format(vpc_cidr_block)
)
+
+
+@mock_ec2
+def test_enable_vpc_classic_link():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.1.0.0/16")
+
+ response = ec2.meta.client.enable_vpc_classic_link(VpcId=vpc.id)
+ assert response.get("Return").should.be.true
+
+
+@mock_ec2
+def test_enable_vpc_classic_link_failure():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.90.0.0/16")
+
+ response = ec2.meta.client.enable_vpc_classic_link(VpcId=vpc.id)
+ assert response.get("Return").should.be.false
+
+
+@mock_ec2
+def test_disable_vpc_classic_link():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link(VpcId=vpc.id)
+ response = ec2.meta.client.disable_vpc_classic_link(VpcId=vpc.id)
+ assert response.get("Return").should.be.false
+
+
+@mock_ec2
+def test_describe_classic_link_enabled():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link(VpcId=vpc.id)
+ response = ec2.meta.client.describe_vpc_classic_link(VpcIds=[vpc.id])
+ assert response.get("Vpcs")[0].get("ClassicLinkEnabled").should.be.true
+
+
+@mock_ec2
+def test_describe_classic_link_disabled():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.90.0.0/16")
+
+ response = ec2.meta.client.describe_vpc_classic_link(VpcIds=[vpc.id])
+ assert response.get("Vpcs")[0].get("ClassicLinkEnabled").should.be.false
+
+
+@mock_ec2
+def test_describe_classic_link_multiple():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc1 = ec2.create_vpc(CidrBlock="10.90.0.0/16")
+ vpc2 = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link(VpcId=vpc2.id)
+ response = ec2.meta.client.describe_vpc_classic_link(VpcIds=[vpc1.id, vpc2.id])
+    expected = [
+        {"VpcId": vpc1.id, "ClassicLinkEnabled": False},
+        {"VpcId": vpc2.id, "ClassicLinkEnabled": True},
+    ]
+
+    # Sort both sides (order is random); list.sort() returns None, so the old
+    # `.sort() == .sort()` comparison was always True — use sorted() instead
+    sorted(response.get("Vpcs"), key=lambda x: x["VpcId"]).should.equal(
+        sorted(expected, key=lambda x: x["VpcId"])
+    )
+
+
+@mock_ec2
+def test_enable_vpc_classic_link_dns_support():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.1.0.0/16")
+
+ response = ec2.meta.client.enable_vpc_classic_link_dns_support(VpcId=vpc.id)
+ assert response.get("Return").should.be.true
+
+
+@mock_ec2
+def test_disable_vpc_classic_link_dns_support():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link_dns_support(VpcId=vpc.id)
+ response = ec2.meta.client.disable_vpc_classic_link_dns_support(VpcId=vpc.id)
+ assert response.get("Return").should.be.false
+
+
+@mock_ec2
+def test_describe_classic_link_dns_support_enabled():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link_dns_support(VpcId=vpc.id)
+ response = ec2.meta.client.describe_vpc_classic_link_dns_support(VpcIds=[vpc.id])
+ assert response.get("Vpcs")[0].get("ClassicLinkDnsSupported").should.be.true
+
+
+@mock_ec2
+def test_describe_classic_link_dns_support_disabled():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc = ec2.create_vpc(CidrBlock="10.90.0.0/16")
+
+ response = ec2.meta.client.describe_vpc_classic_link_dns_support(VpcIds=[vpc.id])
+ assert response.get("Vpcs")[0].get("ClassicLinkDnsSupported").should.be.false
+
+
+@mock_ec2
+def test_describe_classic_link_dns_support_multiple():
+ ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+ # Create VPC
+ vpc1 = ec2.create_vpc(CidrBlock="10.90.0.0/16")
+ vpc2 = ec2.create_vpc(CidrBlock="10.0.0.0/16")
+
+ ec2.meta.client.enable_vpc_classic_link_dns_support(VpcId=vpc2.id)
+ response = ec2.meta.client.describe_vpc_classic_link_dns_support(
+ VpcIds=[vpc1.id, vpc2.id]
+ )
+ expected = [
+ {"VpcId": vpc1.id, "ClassicLinkDnsSupported": False},
+ {"VpcId": vpc2.id, "ClassicLinkDnsSupported": True},
+ ]
+
+    # Sort both sides (order is random); list.sort() returns None, so comparing
+    # `.sort()` results was a vacuous `None == None` — use sorted() instead
+    sorted(response.get("Vpcs"), key=lambda x: x["VpcId"]).should.equal(
+        sorted(expected, key=lambda x: x["VpcId"])
+    )
diff --git a/tests/test_events/test_events.py b/tests/test_events/test_events.py
index d5bfdf782..5f81e2cf6 100644
--- a/tests/test_events/test_events.py
+++ b/tests/test_events/test_events.py
@@ -1,6 +1,7 @@
import random
import boto3
import json
+import sure # noqa
from moto.events import mock_events
from botocore.exceptions import ClientError
@@ -204,6 +205,53 @@ def test_permissions():
assert resp_policy["Statement"][0]["Sid"] == "Account1"
+@mock_events
+def test_put_permission_errors():
+ client = boto3.client("events", "us-east-1")
+ client.create_event_bus(Name="test-bus")
+
+ client.put_permission.when.called_with(
+ EventBusName="non-existing",
+ Action="events:PutEvents",
+ Principal="111111111111",
+ StatementId="test",
+ ).should.throw(ClientError, "Event bus non-existing does not exist.")
+
+ client.put_permission.when.called_with(
+ EventBusName="test-bus",
+ Action="events:PutPermission",
+ Principal="111111111111",
+ StatementId="test",
+ ).should.throw(
+ ClientError, "Provided value in parameter 'action' is not supported."
+ )
+
+
+@mock_events
+def test_remove_permission_errors():
+ client = boto3.client("events", "us-east-1")
+ client.create_event_bus(Name="test-bus")
+
+ client.remove_permission.when.called_with(
+ EventBusName="non-existing", StatementId="test"
+ ).should.throw(ClientError, "Event bus non-existing does not exist.")
+
+ client.remove_permission.when.called_with(
+ EventBusName="test-bus", StatementId="test"
+ ).should.throw(ClientError, "EventBus does not have a policy.")
+
+ client.put_permission(
+ EventBusName="test-bus",
+ Action="events:PutEvents",
+ Principal="111111111111",
+ StatementId="test",
+ )
+
+ client.remove_permission.when.called_with(
+ EventBusName="test-bus", StatementId="non-existing"
+ ).should.throw(ClientError, "Statement with the provided id does not exist.")
+
+
@mock_events
def test_put_events():
client = boto3.client("events", "eu-central-1")
@@ -220,3 +268,177 @@ def test_put_events():
with assert_raises(ClientError):
client.put_events(Entries=[event] * 20)
+
+
+@mock_events
+def test_create_event_bus():
+ client = boto3.client("events", "us-east-1")
+ response = client.create_event_bus(Name="test-bus")
+
+ response["EventBusArn"].should.equal(
+ "arn:aws:events:us-east-1:123456789012:event-bus/test-bus"
+ )
+
+
+@mock_events
+def test_create_event_bus_errors():
+ client = boto3.client("events", "us-east-1")
+ client.create_event_bus(Name="test-bus")
+
+ client.create_event_bus.when.called_with(Name="test-bus").should.throw(
+ ClientError, "Event bus test-bus already exists."
+ )
+
+ # the 'default' name is already used for the account's default event bus.
+ client.create_event_bus.when.called_with(Name="default").should.throw(
+ ClientError, "Event bus default already exists."
+ )
+
+ # non partner event buses can't contain the '/' character
+ client.create_event_bus.when.called_with(Name="test/test-bus").should.throw(
+ ClientError, "Event bus name must not contain '/'."
+ )
+
+ client.create_event_bus.when.called_with(
+ Name="aws.partner/test/test-bus", EventSourceName="aws.partner/test/test-bus"
+ ).should.throw(
+ ClientError, "Event source aws.partner/test/test-bus does not exist."
+ )
+
+
+@mock_events
+def test_describe_event_bus():
+ client = boto3.client("events", "us-east-1")
+
+ response = client.describe_event_bus()
+
+ response["Name"].should.equal("default")
+ response["Arn"].should.equal(
+ "arn:aws:events:us-east-1:123456789012:event-bus/default"
+ )
+ response.should_not.have.key("Policy")
+
+ client.create_event_bus(Name="test-bus")
+ client.put_permission(
+ EventBusName="test-bus",
+ Action="events:PutEvents",
+ Principal="111111111111",
+ StatementId="test",
+ )
+
+ response = client.describe_event_bus(Name="test-bus")
+
+ response["Name"].should.equal("test-bus")
+ response["Arn"].should.equal(
+ "arn:aws:events:us-east-1:123456789012:event-bus/test-bus"
+ )
+ json.loads(response["Policy"]).should.equal(
+ {
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "test",
+ "Effect": "Allow",
+ "Principal": {"AWS": "arn:aws:iam::111111111111:root"},
+ "Action": "events:PutEvents",
+ "Resource": "arn:aws:events:us-east-1:123456789012:event-bus/test-bus",
+ }
+ ],
+ }
+ )
+
+
+@mock_events
+def test_describe_event_bus_errors():
+ client = boto3.client("events", "us-east-1")
+
+ client.describe_event_bus.when.called_with(Name="non-existing").should.throw(
+ ClientError, "Event bus non-existing does not exist."
+ )
+
+
+@mock_events
+def test_list_event_buses():
+ client = boto3.client("events", "us-east-1")
+ client.create_event_bus(Name="test-bus-1")
+ client.create_event_bus(Name="test-bus-2")
+ client.create_event_bus(Name="other-bus-1")
+ client.create_event_bus(Name="other-bus-2")
+
+ response = client.list_event_buses()
+
+ response["EventBuses"].should.have.length_of(5)
+ sorted(response["EventBuses"], key=lambda i: i["Name"]).should.equal(
+ [
+ {
+ "Name": "default",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/default",
+ },
+ {
+ "Name": "other-bus-1",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/other-bus-1",
+ },
+ {
+ "Name": "other-bus-2",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/other-bus-2",
+ },
+ {
+ "Name": "test-bus-1",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/test-bus-1",
+ },
+ {
+ "Name": "test-bus-2",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/test-bus-2",
+ },
+ ]
+ )
+
+ response = client.list_event_buses(NamePrefix="other-bus")
+
+ response["EventBuses"].should.have.length_of(2)
+ sorted(response["EventBuses"], key=lambda i: i["Name"]).should.equal(
+ [
+ {
+ "Name": "other-bus-1",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/other-bus-1",
+ },
+ {
+ "Name": "other-bus-2",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/other-bus-2",
+ },
+ ]
+ )
+
+
+@mock_events
+def test_delete_event_bus():
+ client = boto3.client("events", "us-east-1")
+ client.create_event_bus(Name="test-bus")
+
+ response = client.list_event_buses()
+ response["EventBuses"].should.have.length_of(2)
+
+ client.delete_event_bus(Name="test-bus")
+
+ response = client.list_event_buses()
+ response["EventBuses"].should.have.length_of(1)
+ response["EventBuses"].should.equal(
+ [
+ {
+ "Name": "default",
+ "Arn": "arn:aws:events:us-east-1:123456789012:event-bus/default",
+ }
+ ]
+ )
+
+ # deleting non existing event bus should be successful
+ client.delete_event_bus(Name="non-existing")
+
+
+@mock_events
+def test_delete_event_bus_errors():
+ client = boto3.client("events", "us-east-1")
+
+ client.delete_event_bus.when.called_with(Name="default").should.throw(
+ ClientError, "Cannot delete event bus default."
+ )
diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py
index e0d8fdb82..366ea3620 100644
--- a/tests/test_iam/test_iam.py
+++ b/tests/test_iam/test_iam.py
@@ -18,6 +18,7 @@ from nose.tools import raises
from datetime import datetime
from tests.helpers import requires_boto_gte
+from uuid import uuid4
MOCK_CERT = """-----BEGIN CERTIFICATE-----
@@ -169,6 +170,14 @@ def test_create_role_and_instance_profile():
profile.path.should.equal("/")
+@mock_iam
+def test_create_instance_profile_should_throw_when_name_is_not_unique():
+ conn = boto3.client("iam", region_name="us-east-1")
+ conn.create_instance_profile(InstanceProfileName="unique-instance-profile")
+ with assert_raises(ClientError):
+ conn.create_instance_profile(InstanceProfileName="unique-instance-profile")
+
+
@mock_iam_deprecated()
def test_remove_role_from_instance_profile():
conn = boto.connect_iam()
@@ -400,6 +409,21 @@ def test_create_policy():
)
+@mock_iam
+def test_create_policy_already_exists():
+ conn = boto3.client("iam", region_name="us-east-1")
+ response = conn.create_policy(
+ PolicyName="TestCreatePolicy", PolicyDocument=MOCK_POLICY
+ )
+ with assert_raises(conn.exceptions.EntityAlreadyExistsException) as ex:
+ response = conn.create_policy(
+ PolicyName="TestCreatePolicy", PolicyDocument=MOCK_POLICY
+ )
+ ex.exception.response["Error"]["Code"].should.equal("EntityAlreadyExists")
+ ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(409)
+ ex.exception.response["Error"]["Message"].should.contain("TestCreatePolicy")
+
+
@mock_iam
def test_delete_policy():
conn = boto3.client("iam", region_name="us-east-1")
@@ -1292,6 +1316,122 @@ def test_get_access_key_last_used():
resp["UserName"].should.equal(create_key_response["UserName"])
+@mock_iam
+def test_upload_ssh_public_key():
+ iam = boto3.resource("iam", region_name="us-east-1")
+ client = iam.meta.client
+ username = "test-user"
+ iam.create_user(UserName=username)
+ public_key = MOCK_CERT
+
+ resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
+ pubkey = resp["SSHPublicKey"]
+ pubkey["SSHPublicKeyBody"].should.equal(public_key)
+ pubkey["UserName"].should.equal(username)
+ pubkey["SSHPublicKeyId"].should.have.length_of(20)
+ assert pubkey["SSHPublicKeyId"].startswith("APKA")
+ pubkey.should.have.key("Fingerprint")
+ pubkey["Status"].should.equal("Active")
+ (
+ datetime.utcnow() - pubkey["UploadDate"].replace(tzinfo=None)
+ ).seconds.should.be.within(0, 10)
+
+
+@mock_iam
+def test_get_ssh_public_key():
+ iam = boto3.resource("iam", region_name="us-east-1")
+ client = iam.meta.client
+ username = "test-user"
+ iam.create_user(UserName=username)
+ public_key = MOCK_CERT
+
+ with assert_raises(ClientError):
+ client.get_ssh_public_key(
+ UserName=username, SSHPublicKeyId="xxnon-existent-keyxx", Encoding="SSH"
+ )
+
+ resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
+ ssh_public_key_id = resp["SSHPublicKey"]["SSHPublicKeyId"]
+
+ resp = client.get_ssh_public_key(
+ UserName=username, SSHPublicKeyId=ssh_public_key_id, Encoding="SSH"
+ )
+ resp["SSHPublicKey"]["SSHPublicKeyBody"].should.equal(public_key)
+
+
+@mock_iam
+def test_list_ssh_public_keys():
+ iam = boto3.resource("iam", region_name="us-east-1")
+ client = iam.meta.client
+ username = "test-user"
+ iam.create_user(UserName=username)
+ public_key = MOCK_CERT
+
+ resp = client.list_ssh_public_keys(UserName=username)
+ resp["SSHPublicKeys"].should.have.length_of(0)
+
+ resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
+ ssh_public_key_id = resp["SSHPublicKey"]["SSHPublicKeyId"]
+
+ resp = client.list_ssh_public_keys(UserName=username)
+ resp["SSHPublicKeys"].should.have.length_of(1)
+ resp["SSHPublicKeys"][0]["SSHPublicKeyId"].should.equal(ssh_public_key_id)
+
+
+@mock_iam
+def test_update_ssh_public_key():
+ iam = boto3.resource("iam", region_name="us-east-1")
+ client = iam.meta.client
+ username = "test-user"
+ iam.create_user(UserName=username)
+ public_key = MOCK_CERT
+
+ with assert_raises(ClientError):
+ client.update_ssh_public_key(
+ UserName=username, SSHPublicKeyId="xxnon-existent-keyxx", Status="Inactive"
+ )
+
+ resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
+ ssh_public_key_id = resp["SSHPublicKey"]["SSHPublicKeyId"]
+ resp["SSHPublicKey"]["Status"].should.equal("Active")
+
+ resp = client.update_ssh_public_key(
+ UserName=username, SSHPublicKeyId=ssh_public_key_id, Status="Inactive"
+ )
+
+ resp = client.get_ssh_public_key(
+ UserName=username, SSHPublicKeyId=ssh_public_key_id, Encoding="SSH"
+ )
+ resp["SSHPublicKey"]["Status"].should.equal("Inactive")
+
+
+@mock_iam
+def test_delete_ssh_public_key():
+ iam = boto3.resource("iam", region_name="us-east-1")
+ client = iam.meta.client
+ username = "test-user"
+ iam.create_user(UserName=username)
+ public_key = MOCK_CERT
+
+ with assert_raises(ClientError):
+ client.delete_ssh_public_key(
+ UserName=username, SSHPublicKeyId="xxnon-existent-keyxx"
+ )
+
+ resp = client.upload_ssh_public_key(UserName=username, SSHPublicKeyBody=public_key)
+ ssh_public_key_id = resp["SSHPublicKey"]["SSHPublicKeyId"]
+
+ resp = client.list_ssh_public_keys(UserName=username)
+ resp["SSHPublicKeys"].should.have.length_of(1)
+
+ resp = client.delete_ssh_public_key(
+ UserName=username, SSHPublicKeyId=ssh_public_key_id
+ )
+
+ resp = client.list_ssh_public_keys(UserName=username)
+ resp["SSHPublicKeys"].should.have.length_of(0)
+
+
@mock_iam
def test_get_account_authorization_details():
test_policy = json.dumps(
@@ -2027,6 +2167,42 @@ def test_create_role_with_permissions_boundary():
conn.list_roles().get("Roles")[0].get("PermissionsBoundary").should.equal(expected)
+@mock_iam
+def test_create_role_with_same_name_should_fail():
+ iam = boto3.client("iam", region_name="us-east-1")
+ test_role_name = str(uuid4())
+ iam.create_role(
+ RoleName=test_role_name, AssumeRolePolicyDocument="policy", Description="test"
+ )
+ # Create the role again, and verify that it fails
+ with assert_raises(ClientError) as err:
+ iam.create_role(
+ RoleName=test_role_name,
+ AssumeRolePolicyDocument="policy",
+ Description="test",
+ )
+ err.exception.response["Error"]["Code"].should.equal("EntityAlreadyExists")
+ err.exception.response["Error"]["Message"].should.equal(
+ "Role with name {0} already exists.".format(test_role_name)
+ )
+
+
+@mock_iam
+def test_create_policy_with_same_name_should_fail():
+ iam = boto3.client("iam", region_name="us-east-1")
+ test_policy_name = str(uuid4())
+ policy = iam.create_policy(PolicyName=test_policy_name, PolicyDocument=MOCK_POLICY)
+ # Create the role again, and verify that it fails
+ with assert_raises(ClientError) as err:
+ iam.create_policy(PolicyName=test_policy_name, PolicyDocument=MOCK_POLICY)
+ err.exception.response["Error"]["Code"].should.equal("EntityAlreadyExists")
+ err.exception.response["Error"]["Message"].should.equal(
+ "A policy called {0} already exists. Duplicate names are not allowed.".format(
+ test_policy_name
+ )
+ )
+
+
@mock_iam
def test_create_open_id_connect_provider():
client = boto3.client("iam", region_name="us-east-1")
@@ -2302,3 +2478,123 @@ def test_delete_account_password_policy_errors():
client.delete_account_password_policy.when.called_with().should.throw(
ClientError, "The account policy with name PasswordPolicy cannot be found."
)
+
+
+@mock_iam
+def test_get_account_summary():
+ client = boto3.client("iam", region_name="us-east-1")
+ iam = boto3.resource("iam", region_name="us-east-1")
+
+ account_summary = iam.AccountSummary()
+
+ account_summary.summary_map.should.equal(
+ {
+ "GroupPolicySizeQuota": 5120,
+ "InstanceProfilesQuota": 1000,
+ "Policies": 0,
+ "GroupsPerUserQuota": 10,
+ "InstanceProfiles": 0,
+ "AttachedPoliciesPerUserQuota": 10,
+ "Users": 0,
+ "PoliciesQuota": 1500,
+ "Providers": 0,
+ "AccountMFAEnabled": 0,
+ "AccessKeysPerUserQuota": 2,
+ "AssumeRolePolicySizeQuota": 2048,
+ "PolicyVersionsInUseQuota": 10000,
+ "GlobalEndpointTokenVersion": 1,
+ "VersionsPerPolicyQuota": 5,
+ "AttachedPoliciesPerGroupQuota": 10,
+ "PolicySizeQuota": 6144,
+ "Groups": 0,
+ "AccountSigningCertificatesPresent": 0,
+ "UsersQuota": 5000,
+ "ServerCertificatesQuota": 20,
+ "MFADevices": 0,
+ "UserPolicySizeQuota": 2048,
+ "PolicyVersionsInUse": 0,
+ "ServerCertificates": 0,
+ "Roles": 0,
+ "RolesQuota": 1000,
+ "SigningCertificatesPerUserQuota": 2,
+ "MFADevicesInUse": 0,
+ "RolePolicySizeQuota": 10240,
+ "AttachedPoliciesPerRoleQuota": 10,
+ "AccountAccessKeysPresent": 0,
+ "GroupsQuota": 300,
+ }
+ )
+
+ client.create_instance_profile(InstanceProfileName="test-profile")
+ client.create_open_id_connect_provider(
+ Url="https://example.com", ThumbprintList=[],
+ )
+ response_policy = client.create_policy(
+ PolicyName="test-policy", PolicyDocument=MOCK_POLICY
+ )
+ client.create_role(RoleName="test-role", AssumeRolePolicyDocument="test policy")
+ client.attach_role_policy(
+ RoleName="test-role", PolicyArn=response_policy["Policy"]["Arn"]
+ )
+ client.create_saml_provider(
+ Name="TestSAMLProvider", SAMLMetadataDocument="a" * 1024
+ )
+ client.create_group(GroupName="test-group")
+ client.attach_group_policy(
+ GroupName="test-group", PolicyArn=response_policy["Policy"]["Arn"]
+ )
+ client.create_user(UserName="test-user")
+ client.attach_user_policy(
+ UserName="test-user", PolicyArn=response_policy["Policy"]["Arn"]
+ )
+ client.enable_mfa_device(
+ UserName="test-user",
+ SerialNumber="123456789",
+ AuthenticationCode1="234567",
+ AuthenticationCode2="987654",
+ )
+ client.create_virtual_mfa_device(VirtualMFADeviceName="test-device")
+ client.upload_server_certificate(
+ ServerCertificateName="test-cert",
+ CertificateBody="cert-body",
+ PrivateKey="private-key",
+ )
+ account_summary.load()
+
+ account_summary.summary_map.should.equal(
+ {
+ "GroupPolicySizeQuota": 5120,
+ "InstanceProfilesQuota": 1000,
+ "Policies": 1,
+ "GroupsPerUserQuota": 10,
+ "InstanceProfiles": 1,
+ "AttachedPoliciesPerUserQuota": 10,
+ "Users": 1,
+ "PoliciesQuota": 1500,
+ "Providers": 2,
+ "AccountMFAEnabled": 0,
+ "AccessKeysPerUserQuota": 2,
+ "AssumeRolePolicySizeQuota": 2048,
+ "PolicyVersionsInUseQuota": 10000,
+ "GlobalEndpointTokenVersion": 1,
+ "VersionsPerPolicyQuota": 5,
+ "AttachedPoliciesPerGroupQuota": 10,
+ "PolicySizeQuota": 6144,
+ "Groups": 1,
+ "AccountSigningCertificatesPresent": 0,
+ "UsersQuota": 5000,
+ "ServerCertificatesQuota": 20,
+ "MFADevices": 1,
+ "UserPolicySizeQuota": 2048,
+ "PolicyVersionsInUse": 3,
+ "ServerCertificates": 1,
+ "Roles": 1,
+ "RolesQuota": 1000,
+ "SigningCertificatesPerUserQuota": 2,
+ "MFADevicesInUse": 1,
+ "RolePolicySizeQuota": 10240,
+ "AttachedPoliciesPerRoleQuota": 10,
+ "AccountAccessKeysPresent": 0,
+ "GroupsQuota": 300,
+ }
+ )
diff --git a/tests/test_iam/test_iam_groups.py b/tests/test_iam/test_iam_groups.py
index 7fd299281..7b73e89ea 100644
--- a/tests/test_iam/test_iam_groups.py
+++ b/tests/test_iam/test_iam_groups.py
@@ -8,6 +8,7 @@ import sure # noqa
from nose.tools import assert_raises
from boto.exception import BotoServerError
+from botocore.exceptions import ClientError
from moto import mock_iam, mock_iam_deprecated
MOCK_POLICY = """
@@ -182,3 +183,25 @@ def test_list_group_policies():
conn.list_group_policies(GroupName="my-group")["PolicyNames"].should.equal(
["my-policy"]
)
+
+
+@mock_iam
+def test_delete_group():
+ conn = boto3.client("iam", region_name="us-east-1")
+ conn.create_group(GroupName="my-group")
+ groups = conn.list_groups()
+ assert groups["Groups"][0]["GroupName"] == "my-group"
+ assert len(groups["Groups"]) == 1
+ conn.delete_group(GroupName="my-group")
+ conn.list_groups()["Groups"].should.be.empty
+
+
+@mock_iam
+def test_delete_unknown_group():
+ conn = boto3.client("iam", region_name="us-east-1")
+ with assert_raises(ClientError) as err:
+ conn.delete_group(GroupName="unknown-group")
+ err.exception.response["Error"]["Code"].should.equal("NoSuchEntity")
+ err.exception.response["Error"]["Message"].should.equal(
+ "The group with name unknown-group cannot be found."
+ )
diff --git a/tests/test_organizations/test_organizations_boto3.py b/tests/test_organizations/test_organizations_boto3.py
index f8eb1328e..3b4a51557 100644
--- a/tests/test_organizations/test_organizations_boto3.py
+++ b/tests/test_organizations/test_organizations_boto3.py
@@ -159,6 +159,17 @@ def test_create_account():
create_status["AccountName"].should.equal(mockname)
+@mock_organizations
+def test_describe_create_account_status():
+ client = boto3.client("organizations", region_name="us-east-1")
+ client.create_organization(FeatureSet="ALL")["Organization"]
+ request_id = client.create_account(AccountName=mockname, Email=mockemail)[
+ "CreateAccountStatus"
+ ]["Id"]
+ response = client.describe_create_account_status(CreateAccountRequestId=request_id)
+ validate_create_account_status(response["CreateAccountStatus"])
+
+
@mock_organizations
def test_describe_account():
client = boto3.client("organizations", region_name="us-east-1")
diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py
index bf688ec12..a7c7a6862 100644
--- a/tests/test_secretsmanager/test_secretsmanager.py
+++ b/tests/test_secretsmanager/test_secretsmanager.py
@@ -45,7 +45,7 @@ def test_get_secret_that_does_not_exist():
result = conn.get_secret_value(SecretId="i-dont-exist")
assert_equal(
- "Secrets Manager can\u2019t find the specified secret.",
+ "Secrets Manager can't find the specified secret.",
cm.exception.response["Error"]["Message"],
)
@@ -61,7 +61,7 @@ def test_get_secret_that_does_not_match():
result = conn.get_secret_value(SecretId="i-dont-match")
assert_equal(
- "Secrets Manager can\u2019t find the specified secret.",
+ "Secrets Manager can't find the specified secret.",
cm.exception.response["Error"]["Message"],
)
@@ -88,7 +88,7 @@ def test_get_secret_that_has_no_value():
result = conn.get_secret_value(SecretId="java-util-test-password")
assert_equal(
- "Secrets Manager can\u2019t find the specified secret value for staging label: AWSCURRENT",
+ "Secrets Manager can't find the specified secret value for staging label: AWSCURRENT",
cm.exception.response["Error"]["Message"],
)
diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py
index 89cb90185..9501c7c7c 100644
--- a/tests/test_secretsmanager/test_server.py
+++ b/tests/test_secretsmanager/test_server.py
@@ -48,9 +48,7 @@ def test_get_secret_that_does_not_exist():
headers={"X-Amz-Target": "secretsmanager.GetSecretValue"},
)
json_data = json.loads(get_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
@@ -70,9 +68,7 @@ def test_get_secret_that_does_not_match():
headers={"X-Amz-Target": "secretsmanager.GetSecretValue"},
)
json_data = json.loads(get_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
@@ -95,7 +91,7 @@ def test_get_secret_that_has_no_value():
json_data = json.loads(get_secret.data.decode("utf-8"))
assert (
json_data["message"]
- == "Secrets Manager can\u2019t find the specified secret value for staging label: AWSCURRENT"
+ == "Secrets Manager can't find the specified secret value for staging label: AWSCURRENT"
)
assert json_data["__type"] == "ResourceNotFoundException"
@@ -178,9 +174,7 @@ def test_describe_secret_that_does_not_exist():
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
@@ -202,9 +196,7 @@ def test_describe_secret_that_does_not_match():
)
json_data = json.loads(describe_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
@@ -306,9 +298,7 @@ def test_rotate_secret_that_does_not_exist():
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
@@ -330,9 +320,7 @@ def test_rotate_secret_that_does_not_match():
)
json_data = json.loads(rotate_secret.data.decode("utf-8"))
- assert (
- json_data["message"] == "Secrets Manager can\u2019t find the specified secret."
- )
+ assert json_data["message"] == "Secrets Manager can't find the specified secret."
assert json_data["__type"] == "ResourceNotFoundException"
diff --git a/tests/test_sns/test_publishing_boto3.py b/tests/test_sns/test_publishing_boto3.py
index 64669d5e0..5bda0720c 100644
--- a/tests/test_sns/test_publishing_boto3.py
+++ b/tests/test_sns/test_publishing_boto3.py
@@ -173,6 +173,27 @@ def test_publish_to_sqs_msg_attr_byte_value():
)
+@mock_sqs
+@mock_sns
+def test_publish_to_sqs_msg_attr_number_type():
+ sns = boto3.resource("sns", region_name="us-east-1")
+ topic = sns.create_topic(Name="test-topic")
+ sqs = boto3.resource("sqs", region_name="us-east-1")
+ queue = sqs.create_queue(QueueName="test-queue")
+ topic.subscribe(Protocol="sqs", Endpoint=queue.attributes["QueueArn"])
+
+ topic.publish(
+ Message="test message",
+ MessageAttributes={"retries": {"DataType": "Number", "StringValue": "0"}},
+ )
+
+ message = json.loads(queue.receive_messages()[0].body)
+ message["Message"].should.equal("test message")
+ message["MessageAttributes"].should.equal(
+ {"retries": {"Type": "Number", "Value": 0}}
+ )
+
+
@mock_sns
def test_publish_sms():
client = boto3.client("sns", region_name="us-east-1")