Merge pull request #10 from spulec/master

Merge upstream
This commit is contained in:
Bert Blommers 2019-10-23 08:00:02 +01:00 committed by GitHub
commit b927ec99b5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 1256 additions and 250 deletions

View File

@ -146,12 +146,15 @@
- [ ] delete_domain_association - [ ] delete_domain_association
- [ ] delete_job - [ ] delete_job
- [ ] delete_webhook - [ ] delete_webhook
- [ ] generate_access_logs
- [ ] get_app - [ ] get_app
- [ ] get_artifact_url
- [ ] get_branch - [ ] get_branch
- [ ] get_domain_association - [ ] get_domain_association
- [ ] get_job - [ ] get_job
- [ ] get_webhook - [ ] get_webhook
- [ ] list_apps - [ ] list_apps
- [ ] list_artifacts
- [ ] list_branches - [ ] list_branches
- [ ] list_domain_associations - [ ] list_domain_associations
- [ ] list_jobs - [ ] list_jobs
@ -292,6 +295,8 @@
## apigatewaymanagementapi ## apigatewaymanagementapi
0% implemented 0% implemented
- [ ] delete_connection
- [ ] get_connection
- [ ] post_to_connection - [ ] post_to_connection
## apigatewayv2 ## apigatewayv2
@ -385,6 +390,7 @@
- [ ] list_applications - [ ] list_applications
- [ ] list_components - [ ] list_components
- [ ] list_problems - [ ] list_problems
- [ ] update_application
- [ ] update_component - [ ] update_component
- [ ] update_component_configuration - [ ] update_component_configuration
@ -509,11 +515,11 @@
- [ ] update_type - [ ] update_type
## athena ## athena
0% implemented 10% implemented
- [ ] batch_get_named_query - [ ] batch_get_named_query
- [ ] batch_get_query_execution - [ ] batch_get_query_execution
- [ ] create_named_query - [ ] create_named_query
- [ ] create_work_group - [X] create_work_group
- [ ] delete_named_query - [ ] delete_named_query
- [ ] delete_work_group - [ ] delete_work_group
- [ ] get_named_query - [ ] get_named_query
@ -523,7 +529,7 @@
- [ ] list_named_queries - [ ] list_named_queries
- [ ] list_query_executions - [ ] list_query_executions
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] list_work_groups - [X] list_work_groups
- [ ] start_query_execution - [ ] start_query_execution
- [ ] stop_query_execution - [ ] stop_query_execution
- [ ] tag_resource - [ ] tag_resource
@ -1069,6 +1075,7 @@
## codecommit ## codecommit
0% implemented 0% implemented
- [ ] batch_describe_merge_conflicts - [ ] batch_describe_merge_conflicts
- [ ] batch_get_commits
- [ ] batch_get_repositories - [ ] batch_get_repositories
- [ ] create_branch - [ ] create_branch
- [ ] create_commit - [ ] create_commit
@ -1232,7 +1239,7 @@
- [ ] update_user_profile - [ ] update_user_profile
## cognito-identity ## cognito-identity
23% implemented 28% implemented
- [X] create_identity_pool - [X] create_identity_pool
- [ ] delete_identities - [ ] delete_identities
- [ ] delete_identity_pool - [ ] delete_identity_pool
@ -1428,13 +1435,22 @@
## comprehendmedical ## comprehendmedical
0% implemented 0% implemented
- [ ] describe_entities_detection_v2_job
- [ ] describe_phi_detection_job
- [ ] detect_entities - [ ] detect_entities
- [ ] detect_entities_v2
- [ ] detect_phi - [ ] detect_phi
- [ ] list_entities_detection_v2_jobs
- [ ] list_phi_detection_jobs
- [ ] start_entities_detection_v2_job
- [ ] start_phi_detection_job
- [ ] stop_entities_detection_v2_job
- [ ] stop_phi_detection_job
## config ## config
24% implemented 31% implemented
- [ ] batch_get_aggregate_resource_config - [X] batch_get_aggregate_resource_config
- [ ] batch_get_resource_config - [X] batch_get_resource_config
- [X] delete_aggregation_authorization - [X] delete_aggregation_authorization
- [ ] delete_config_rule - [ ] delete_config_rule
- [X] delete_configuration_aggregator - [X] delete_configuration_aggregator
@ -1444,6 +1460,7 @@
- [ ] delete_organization_config_rule - [ ] delete_organization_config_rule
- [ ] delete_pending_aggregation_request - [ ] delete_pending_aggregation_request
- [ ] delete_remediation_configuration - [ ] delete_remediation_configuration
- [ ] delete_remediation_exceptions
- [ ] delete_retention_configuration - [ ] delete_retention_configuration
- [ ] deliver_config_snapshot - [ ] deliver_config_snapshot
- [ ] describe_aggregate_compliance_by_config_rules - [ ] describe_aggregate_compliance_by_config_rules
@ -1462,6 +1479,7 @@
- [ ] describe_organization_config_rules - [ ] describe_organization_config_rules
- [ ] describe_pending_aggregation_requests - [ ] describe_pending_aggregation_requests
- [ ] describe_remediation_configurations - [ ] describe_remediation_configurations
- [ ] describe_remediation_exceptions
- [ ] describe_remediation_execution_status - [ ] describe_remediation_execution_status
- [ ] describe_retention_configurations - [ ] describe_retention_configurations
- [ ] get_aggregate_compliance_details_by_config_rule - [ ] get_aggregate_compliance_details_by_config_rule
@ -1474,9 +1492,9 @@
- [ ] get_compliance_summary_by_resource_type - [ ] get_compliance_summary_by_resource_type
- [ ] get_discovered_resource_counts - [ ] get_discovered_resource_counts
- [ ] get_organization_config_rule_detailed_status - [ ] get_organization_config_rule_detailed_status
- [ ] get_resource_config_history - [X] get_resource_config_history
- [ ] list_aggregate_discovered_resources - [X] list_aggregate_discovered_resources
- [ ] list_discovered_resources - [X] list_discovered_resources
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [X] put_aggregation_authorization - [X] put_aggregation_authorization
- [ ] put_config_rule - [ ] put_config_rule
@ -1486,6 +1504,7 @@
- [ ] put_evaluations - [ ] put_evaluations
- [ ] put_organization_config_rule - [ ] put_organization_config_rule
- [ ] put_remediation_configurations - [ ] put_remediation_configurations
- [ ] put_remediation_exceptions
- [ ] put_retention_configuration - [ ] put_retention_configuration
- [ ] select_resource_config - [ ] select_resource_config
- [ ] start_config_rules_evaluation - [ ] start_config_rules_evaluation
@ -1523,6 +1542,7 @@
0% implemented 0% implemented
- [ ] delete_report_definition - [ ] delete_report_definition
- [ ] describe_report_definitions - [ ] describe_report_definitions
- [ ] modify_report_definition
- [ ] put_report_definition - [ ] put_report_definition
## datapipeline ## datapipeline
@ -1554,6 +1574,7 @@
- [ ] create_location_efs - [ ] create_location_efs
- [ ] create_location_nfs - [ ] create_location_nfs
- [ ] create_location_s3 - [ ] create_location_s3
- [ ] create_location_smb
- [ ] create_task - [ ] create_task
- [ ] delete_agent - [ ] delete_agent
- [ ] delete_location - [ ] delete_location
@ -1562,6 +1583,7 @@
- [ ] describe_location_efs - [ ] describe_location_efs
- [ ] describe_location_nfs - [ ] describe_location_nfs
- [ ] describe_location_s3 - [ ] describe_location_s3
- [ ] describe_location_smb
- [ ] describe_task - [ ] describe_task
- [ ] describe_task_execution - [ ] describe_task_execution
- [ ] list_agents - [ ] list_agents
@ -1771,6 +1793,7 @@
- [ ] create_replication_subnet_group - [ ] create_replication_subnet_group
- [ ] create_replication_task - [ ] create_replication_task
- [ ] delete_certificate - [ ] delete_certificate
- [ ] delete_connection
- [ ] delete_endpoint - [ ] delete_endpoint
- [ ] delete_event_subscription - [ ] delete_event_subscription
- [ ] delete_replication_instance - [ ] delete_replication_instance
@ -1826,6 +1849,7 @@
- [ ] delete_db_cluster_snapshot - [ ] delete_db_cluster_snapshot
- [ ] delete_db_instance - [ ] delete_db_instance
- [ ] delete_db_subnet_group - [ ] delete_db_subnet_group
- [ ] describe_certificates
- [ ] describe_db_cluster_parameter_groups - [ ] describe_db_cluster_parameter_groups
- [ ] describe_db_cluster_parameters - [ ] describe_db_cluster_parameters
- [ ] describe_db_cluster_snapshot_attributes - [ ] describe_db_cluster_snapshot_attributes
@ -2061,6 +2085,7 @@
- [X] delete_network_interface - [X] delete_network_interface
- [ ] delete_network_interface_permission - [ ] delete_network_interface_permission
- [ ] delete_placement_group - [ ] delete_placement_group
- [ ] delete_queued_reserved_instances
- [X] delete_route - [X] delete_route
- [X] delete_route_table - [X] delete_route_table
- [X] delete_security_group - [X] delete_security_group
@ -2105,6 +2130,7 @@
- [X] describe_dhcp_options - [X] describe_dhcp_options
- [ ] describe_egress_only_internet_gateways - [ ] describe_egress_only_internet_gateways
- [ ] describe_elastic_gpus - [ ] describe_elastic_gpus
- [ ] describe_export_image_tasks
- [ ] describe_export_tasks - [ ] describe_export_tasks
- [ ] describe_fleet_history - [ ] describe_fleet_history
- [ ] describe_fleet_instances - [ ] describe_fleet_instances
@ -2210,6 +2236,7 @@
- [ ] enable_vpc_classic_link_dns_support - [ ] enable_vpc_classic_link_dns_support
- [ ] export_client_vpn_client_certificate_revocation_list - [ ] export_client_vpn_client_certificate_revocation_list
- [ ] export_client_vpn_client_configuration - [ ] export_client_vpn_client_configuration
- [ ] export_image
- [ ] export_transit_gateway_routes - [ ] export_transit_gateway_routes
- [ ] get_capacity_reservation_usage - [ ] get_capacity_reservation_usage
- [ ] get_console_output - [ ] get_console_output
@ -2263,6 +2290,8 @@
- [ ] modify_vpc_peering_connection_options - [ ] modify_vpc_peering_connection_options
- [ ] modify_vpc_tenancy - [ ] modify_vpc_tenancy
- [ ] modify_vpn_connection - [ ] modify_vpn_connection
- [ ] modify_vpn_tunnel_certificate
- [ ] modify_vpn_tunnel_options
- [ ] monitor_instances - [ ] monitor_instances
- [ ] move_address_to_vpc - [ ] move_address_to_vpc
- [ ] provision_byoip_cidr - [ ] provision_byoip_cidr
@ -2298,6 +2327,7 @@
- [ ] run_instances - [ ] run_instances
- [ ] run_scheduled_instances - [ ] run_scheduled_instances
- [ ] search_transit_gateway_routes - [ ] search_transit_gateway_routes
- [ ] send_diagnostic_interrupt
- [X] start_instances - [X] start_instances
- [X] stop_instances - [X] stop_instances
- [ ] terminate_client_vpn_connections - [ ] terminate_client_vpn_connections
@ -2343,7 +2373,7 @@
- [ ] upload_layer_part - [ ] upload_layer_part
## ecs ## ecs
49% implemented 66% implemented
- [X] create_cluster - [X] create_cluster
- [X] create_service - [X] create_service
- [ ] create_task_set - [ ] create_task_set
@ -2381,8 +2411,9 @@
- [ ] submit_attachment_state_changes - [ ] submit_attachment_state_changes
- [ ] submit_container_state_change - [ ] submit_container_state_change
- [ ] submit_task_state_change - [ ] submit_task_state_change
- [x] tag_resource - [X] tag_resource
- [x] untag_resource - [X] untag_resource
- [ ] update_cluster_settings
- [ ] update_container_agent - [ ] update_container_agent
- [X] update_container_instances_state - [X] update_container_instances_state
- [X] update_service - [X] update_service
@ -2413,7 +2444,10 @@
- [ ] describe_cluster - [ ] describe_cluster
- [ ] describe_update - [ ] describe_update
- [ ] list_clusters - [ ] list_clusters
- [ ] list_tags_for_resource
- [ ] list_updates - [ ] list_updates
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_cluster_config - [ ] update_cluster_config
- [ ] update_cluster_version - [ ] update_cluster_version
@ -2603,7 +2637,7 @@
- [X] set_subnets - [X] set_subnets
## emr ## emr
55% implemented 51% implemented
- [ ] add_instance_fleet - [ ] add_instance_fleet
- [X] add_instance_groups - [X] add_instance_groups
- [X] add_job_flow_steps - [X] add_job_flow_steps
@ -2615,6 +2649,7 @@
- [X] describe_job_flows - [X] describe_job_flows
- [ ] describe_security_configuration - [ ] describe_security_configuration
- [X] describe_step - [X] describe_step
- [ ] get_block_public_access_configuration
- [X] list_bootstrap_actions - [X] list_bootstrap_actions
- [X] list_clusters - [X] list_clusters
- [ ] list_instance_fleets - [ ] list_instance_fleets
@ -2625,6 +2660,7 @@
- [ ] modify_instance_fleet - [ ] modify_instance_fleet
- [X] modify_instance_groups - [X] modify_instance_groups
- [ ] put_auto_scaling_policy - [ ] put_auto_scaling_policy
- [ ] put_block_public_access_configuration
- [ ] remove_auto_scaling_policy - [ ] remove_auto_scaling_policy
- [X] remove_tags - [X] remove_tags
- [X] run_job_flow - [X] run_job_flow
@ -2724,6 +2760,39 @@
- [ ] put_notification_channel - [ ] put_notification_channel
- [ ] put_policy - [ ] put_policy
## forecast
0% implemented
- [ ] create_dataset
- [ ] create_dataset_group
- [ ] create_dataset_import_job
- [ ] create_forecast
- [ ] create_forecast_export_job
- [ ] create_predictor
- [ ] delete_dataset
- [ ] delete_dataset_group
- [ ] delete_dataset_import_job
- [ ] delete_forecast
- [ ] delete_forecast_export_job
- [ ] delete_predictor
- [ ] describe_dataset
- [ ] describe_dataset_group
- [ ] describe_dataset_import_job
- [ ] describe_forecast
- [ ] describe_forecast_export_job
- [ ] describe_predictor
- [ ] get_accuracy_metrics
- [ ] list_dataset_groups
- [ ] list_dataset_import_jobs
- [ ] list_datasets
- [ ] list_forecast_export_jobs
- [ ] list_forecasts
- [ ] list_predictors
- [ ] update_dataset_group
## forecastquery
0% implemented
- [ ] query_forecast
## fsx ## fsx
0% implemented 0% implemented
- [ ] create_backup - [ ] create_backup
@ -2871,7 +2940,7 @@
- [ ] update_listener - [ ] update_listener
## glue ## glue
5% implemented 4% implemented
- [ ] batch_create_partition - [ ] batch_create_partition
- [ ] batch_delete_connection - [ ] batch_delete_connection
- [ ] batch_delete_partition - [ ] batch_delete_partition
@ -2884,12 +2953,14 @@
- [ ] batch_get_triggers - [ ] batch_get_triggers
- [ ] batch_get_workflows - [ ] batch_get_workflows
- [ ] batch_stop_job_run - [ ] batch_stop_job_run
- [ ] cancel_ml_task_run
- [ ] create_classifier - [ ] create_classifier
- [ ] create_connection - [ ] create_connection
- [ ] create_crawler - [ ] create_crawler
- [X] create_database - [X] create_database
- [ ] create_dev_endpoint - [ ] create_dev_endpoint
- [ ] create_job - [ ] create_job
- [ ] create_ml_transform
- [ ] create_partition - [ ] create_partition
- [ ] create_script - [ ] create_script
- [ ] create_security_configuration - [ ] create_security_configuration
@ -2903,6 +2974,7 @@
- [ ] delete_database - [ ] delete_database
- [ ] delete_dev_endpoint - [ ] delete_dev_endpoint
- [ ] delete_job - [ ] delete_job
- [ ] delete_ml_transform
- [ ] delete_partition - [ ] delete_partition
- [ ] delete_resource_policy - [ ] delete_resource_policy
- [ ] delete_security_configuration - [ ] delete_security_configuration
@ -2927,11 +2999,14 @@
- [ ] get_dev_endpoints - [ ] get_dev_endpoints
- [ ] get_job - [ ] get_job
- [ ] get_job_bookmark - [ ] get_job_bookmark
- [ ] get_job_bookmarks
- [ ] get_job_run - [ ] get_job_run
- [ ] get_job_runs - [ ] get_job_runs
- [ ] get_jobs - [ ] get_jobs
- [ ] get_mapping - [ ] get_mapping
- [ ] get_ml_task_run
- [ ] get_ml_task_runs
- [ ] get_ml_transform
- [ ] get_ml_transforms
- [ ] get_partition - [ ] get_partition
- [ ] get_partitions - [ ] get_partitions
- [ ] get_plan - [ ] get_plan
@ -2961,9 +3036,14 @@
- [ ] put_resource_policy - [ ] put_resource_policy
- [ ] put_workflow_run_properties - [ ] put_workflow_run_properties
- [ ] reset_job_bookmark - [ ] reset_job_bookmark
- [ ] search_tables
- [ ] start_crawler - [ ] start_crawler
- [ ] start_crawler_schedule - [ ] start_crawler_schedule
- [ ] start_export_labels_task_run
- [ ] start_import_labels_task_run
- [ ] start_job_run - [ ] start_job_run
- [ ] start_ml_evaluation_task_run
- [ ] start_ml_labeling_set_generation_task_run
- [ ] start_trigger - [ ] start_trigger
- [ ] start_workflow_run - [ ] start_workflow_run
- [ ] stop_crawler - [ ] stop_crawler
@ -2978,6 +3058,7 @@
- [ ] update_database - [ ] update_database
- [ ] update_dev_endpoint - [ ] update_dev_endpoint
- [ ] update_job - [ ] update_job
- [ ] update_ml_transform
- [ ] update_partition - [ ] update_partition
- [ ] update_table - [ ] update_table
- [ ] update_trigger - [ ] update_trigger
@ -3163,7 +3244,7 @@
- [ ] describe_events - [ ] describe_events
## iam ## iam
57% implemented 61% implemented
- [ ] add_client_id_to_open_id_connect_provider - [ ] add_client_id_to_open_id_connect_provider
- [X] add_role_to_instance_profile - [X] add_role_to_instance_profile
- [X] add_user_to_group - [X] add_user_to_group
@ -3184,7 +3265,7 @@
- [ ] create_service_linked_role - [ ] create_service_linked_role
- [ ] create_service_specific_credential - [ ] create_service_specific_credential
- [X] create_user - [X] create_user
- [ ] create_virtual_mfa_device - [X] create_virtual_mfa_device
- [X] deactivate_mfa_device - [X] deactivate_mfa_device
- [X] delete_access_key - [X] delete_access_key
- [X] delete_account_alias - [X] delete_account_alias
@ -3208,7 +3289,7 @@
- [X] delete_user - [X] delete_user
- [ ] delete_user_permissions_boundary - [ ] delete_user_permissions_boundary
- [X] delete_user_policy - [X] delete_user_policy
- [ ] delete_virtual_mfa_device - [X] delete_virtual_mfa_device
- [X] detach_group_policy - [X] detach_group_policy
- [X] detach_role_policy - [X] detach_role_policy
- [X] detach_user_policy - [X] detach_user_policy
@ -3268,7 +3349,7 @@
- [X] list_user_policies - [X] list_user_policies
- [ ] list_user_tags - [ ] list_user_tags
- [X] list_users - [X] list_users
- [ ] list_virtual_mfa_devices - [X] list_virtual_mfa_devices
- [X] put_group_policy - [X] put_group_policy
- [ ] put_role_permissions_boundary - [ ] put_role_permissions_boundary
- [X] put_role_policy - [X] put_role_policy
@ -3355,7 +3436,7 @@
- [ ] update_assessment_target - [ ] update_assessment_target
## iot ## iot
24% implemented 23% implemented
- [ ] accept_certificate_transfer - [ ] accept_certificate_transfer
- [ ] add_thing_to_billing_group - [ ] add_thing_to_billing_group
- [X] add_thing_to_thing_group - [X] add_thing_to_thing_group
@ -3364,6 +3445,7 @@
- [X] attach_principal_policy - [X] attach_principal_policy
- [ ] attach_security_profile - [ ] attach_security_profile
- [X] attach_thing_principal - [X] attach_thing_principal
- [ ] cancel_audit_mitigation_actions_task
- [ ] cancel_audit_task - [ ] cancel_audit_task
- [ ] cancel_certificate_transfer - [ ] cancel_certificate_transfer
- [ ] cancel_job - [ ] cancel_job
@ -3375,6 +3457,7 @@
- [ ] create_dynamic_thing_group - [ ] create_dynamic_thing_group
- [X] create_job - [X] create_job
- [X] create_keys_and_certificate - [X] create_keys_and_certificate
- [ ] create_mitigation_action
- [ ] create_ota_update - [ ] create_ota_update
- [X] create_policy - [X] create_policy
- [ ] create_policy_version - [ ] create_policy_version
@ -3394,6 +3477,7 @@
- [ ] delete_dynamic_thing_group - [ ] delete_dynamic_thing_group
- [ ] delete_job - [ ] delete_job
- [ ] delete_job_execution - [ ] delete_job_execution
- [ ] delete_mitigation_action
- [ ] delete_ota_update - [ ] delete_ota_update
- [X] delete_policy - [X] delete_policy
- [ ] delete_policy_version - [ ] delete_policy_version
@ -3409,6 +3493,8 @@
- [ ] delete_v2_logging_level - [ ] delete_v2_logging_level
- [ ] deprecate_thing_type - [ ] deprecate_thing_type
- [ ] describe_account_audit_configuration - [ ] describe_account_audit_configuration
- [ ] describe_audit_finding
- [ ] describe_audit_mitigation_actions_task
- [ ] describe_audit_task - [ ] describe_audit_task
- [ ] describe_authorizer - [ ] describe_authorizer
- [ ] describe_billing_group - [ ] describe_billing_group
@ -3420,6 +3506,7 @@
- [ ] describe_index - [ ] describe_index
- [X] describe_job - [X] describe_job
- [ ] describe_job_execution - [ ] describe_job_execution
- [ ] describe_mitigation_action
- [ ] describe_role_alias - [ ] describe_role_alias
- [ ] describe_scheduled_audit - [ ] describe_scheduled_audit
- [ ] describe_security_profile - [ ] describe_security_profile
@ -3448,6 +3535,8 @@
- [ ] list_active_violations - [ ] list_active_violations
- [ ] list_attached_policies - [ ] list_attached_policies
- [ ] list_audit_findings - [ ] list_audit_findings
- [ ] list_audit_mitigation_actions_executions
- [ ] list_audit_mitigation_actions_tasks
- [ ] list_audit_tasks - [ ] list_audit_tasks
- [ ] list_authorizers - [ ] list_authorizers
- [ ] list_billing_groups - [ ] list_billing_groups
@ -3458,6 +3547,7 @@
- [ ] list_job_executions_for_job - [ ] list_job_executions_for_job
- [ ] list_job_executions_for_thing - [ ] list_job_executions_for_thing
- [ ] list_jobs - [ ] list_jobs
- [ ] list_mitigation_actions
- [ ] list_ota_updates - [ ] list_ota_updates
- [ ] list_outgoing_certificates - [ ] list_outgoing_certificates
- [X] list_policies - [X] list_policies
@ -3498,6 +3588,7 @@
- [ ] set_logging_options - [ ] set_logging_options
- [ ] set_v2_logging_level - [ ] set_v2_logging_level
- [ ] set_v2_logging_options - [ ] set_v2_logging_options
- [ ] start_audit_mitigation_actions_task
- [ ] start_on_demand_audit_task - [ ] start_on_demand_audit_task
- [ ] start_thing_registration_task - [ ] start_thing_registration_task
- [ ] stop_thing_registration_task - [ ] stop_thing_registration_task
@ -3515,6 +3606,7 @@
- [ ] update_event_configurations - [ ] update_event_configurations
- [ ] update_indexing_configuration - [ ] update_indexing_configuration
- [ ] update_job - [ ] update_job
- [ ] update_mitigation_action
- [ ] update_role_alias - [ ] update_role_alias
- [ ] update_scheduled_audit - [ ] update_scheduled_audit
- [ ] update_security_profile - [ ] update_security_profile
@ -3692,6 +3784,7 @@
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] tag_resource - [ ] tag_resource
- [ ] untag_resource - [ ] untag_resource
- [ ] update_broker_count
- [ ] update_broker_storage - [ ] update_broker_storage
- [ ] update_cluster_configuration - [ ] update_cluster_configuration
@ -3801,7 +3894,7 @@
- [ ] update_stream - [ ] update_stream
## kms ## kms
54% implemented 48% implemented
- [X] cancel_key_deletion - [X] cancel_key_deletion
- [ ] connect_custom_key_store - [ ] connect_custom_key_store
- [ ] create_alias - [ ] create_alias
@ -3821,8 +3914,8 @@
- [X] enable_key_rotation - [X] enable_key_rotation
- [X] encrypt - [X] encrypt
- [X] generate_data_key - [X] generate_data_key
- [X] generate_data_key_without_plaintext - [ ] generate_data_key_without_plaintext
- [X] generate_random - [ ] generate_random
- [X] get_key_policy - [X] get_key_policy
- [X] get_key_rotation_status - [X] get_key_rotation_status
- [ ] get_parameters_for_import - [ ] get_parameters_for_import
@ -3844,6 +3937,22 @@
- [ ] update_custom_key_store - [ ] update_custom_key_store
- [X] update_key_description - [X] update_key_description
## lakeformation
0% implemented
- [ ] batch_grant_permissions
- [ ] batch_revoke_permissions
- [ ] deregister_resource
- [ ] describe_resource
- [ ] get_data_lake_settings
- [ ] get_effective_permissions_for_path
- [ ] grant_permissions
- [ ] list_permissions
- [ ] list_resources
- [ ] put_data_lake_settings
- [ ] register_resource
- [ ] revoke_permissions
- [ ] update_resource
## lambda ## lambda
0% implemented 0% implemented
- [ ] add_layer_version_permission - [ ] add_layer_version_permission
@ -3927,8 +4036,11 @@
## lex-runtime ## lex-runtime
0% implemented 0% implemented
- [ ] delete_session
- [ ] get_session
- [ ] post_content - [ ] post_content
- [ ] post_text - [ ] post_text
- [ ] put_session
## license-manager ## license-manager
0% implemented 0% implemented
@ -3972,6 +4084,7 @@
- [ ] create_relational_database - [ ] create_relational_database
- [ ] create_relational_database_from_snapshot - [ ] create_relational_database_from_snapshot
- [ ] create_relational_database_snapshot - [ ] create_relational_database_snapshot
- [ ] delete_auto_snapshot
- [ ] delete_disk - [ ] delete_disk
- [ ] delete_disk_snapshot - [ ] delete_disk_snapshot
- [ ] delete_domain - [ ] delete_domain
@ -3987,9 +4100,12 @@
- [ ] detach_disk - [ ] detach_disk
- [ ] detach_instances_from_load_balancer - [ ] detach_instances_from_load_balancer
- [ ] detach_static_ip - [ ] detach_static_ip
- [ ] disable_add_on
- [ ] download_default_key_pair - [ ] download_default_key_pair
- [ ] enable_add_on
- [ ] export_snapshot - [ ] export_snapshot
- [ ] get_active_names - [ ] get_active_names
- [ ] get_auto_snapshots
- [ ] get_blueprints - [ ] get_blueprints
- [ ] get_bundles - [ ] get_bundles
- [ ] get_cloud_formation_stack_records - [ ] get_cloud_formation_stack_records
@ -4053,7 +4169,7 @@
- [ ] update_relational_database_parameters - [ ] update_relational_database_parameters
## logs ## logs
28% implemented 35% implemented
- [ ] associate_kms_key - [ ] associate_kms_key
- [ ] cancel_export_task - [ ] cancel_export_task
- [ ] create_export_task - [ ] create_export_task
@ -4250,12 +4366,15 @@
## mediapackage ## mediapackage
0% implemented 0% implemented
- [ ] create_channel - [ ] create_channel
- [ ] create_harvest_job
- [ ] create_origin_endpoint - [ ] create_origin_endpoint
- [ ] delete_channel - [ ] delete_channel
- [ ] delete_origin_endpoint - [ ] delete_origin_endpoint
- [ ] describe_channel - [ ] describe_channel
- [ ] describe_harvest_job
- [ ] describe_origin_endpoint - [ ] describe_origin_endpoint
- [ ] list_channels - [ ] list_channels
- [ ] list_harvest_jobs
- [ ] list_origin_endpoints - [ ] list_origin_endpoints
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] rotate_channel_credentials - [ ] rotate_channel_credentials
@ -4686,9 +4805,12 @@
0% implemented 0% implemented
- [ ] create_app - [ ] create_app
- [ ] create_campaign - [ ] create_campaign
- [ ] create_email_template
- [ ] create_export_job - [ ] create_export_job
- [ ] create_import_job - [ ] create_import_job
- [ ] create_push_template
- [ ] create_segment - [ ] create_segment
- [ ] create_sms_template
- [ ] delete_adm_channel - [ ] delete_adm_channel
- [ ] delete_apns_channel - [ ] delete_apns_channel
- [ ] delete_apns_sandbox_channel - [ ] delete_apns_sandbox_channel
@ -4698,11 +4820,14 @@
- [ ] delete_baidu_channel - [ ] delete_baidu_channel
- [ ] delete_campaign - [ ] delete_campaign
- [ ] delete_email_channel - [ ] delete_email_channel
- [ ] delete_email_template
- [ ] delete_endpoint - [ ] delete_endpoint
- [ ] delete_event_stream - [ ] delete_event_stream
- [ ] delete_gcm_channel - [ ] delete_gcm_channel
- [ ] delete_push_template
- [ ] delete_segment - [ ] delete_segment
- [ ] delete_sms_channel - [ ] delete_sms_channel
- [ ] delete_sms_template
- [ ] delete_user_endpoints - [ ] delete_user_endpoints
- [ ] delete_voice_channel - [ ] delete_voice_channel
- [ ] get_adm_channel - [ ] get_adm_channel
@ -4723,6 +4848,7 @@
- [ ] get_campaigns - [ ] get_campaigns
- [ ] get_channels - [ ] get_channels
- [ ] get_email_channel - [ ] get_email_channel
- [ ] get_email_template
- [ ] get_endpoint - [ ] get_endpoint
- [ ] get_event_stream - [ ] get_event_stream
- [ ] get_export_job - [ ] get_export_job
@ -4730,6 +4856,7 @@
- [ ] get_gcm_channel - [ ] get_gcm_channel
- [ ] get_import_job - [ ] get_import_job
- [ ] get_import_jobs - [ ] get_import_jobs
- [ ] get_push_template
- [ ] get_segment - [ ] get_segment
- [ ] get_segment_export_jobs - [ ] get_segment_export_jobs
- [ ] get_segment_import_jobs - [ ] get_segment_import_jobs
@ -4737,9 +4864,11 @@
- [ ] get_segment_versions - [ ] get_segment_versions
- [ ] get_segments - [ ] get_segments
- [ ] get_sms_channel - [ ] get_sms_channel
- [ ] get_sms_template
- [ ] get_user_endpoints - [ ] get_user_endpoints
- [ ] get_voice_channel - [ ] get_voice_channel
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] list_templates
- [ ] phone_number_validate - [ ] phone_number_validate
- [ ] put_event_stream - [ ] put_event_stream
- [ ] put_events - [ ] put_events
@ -4757,11 +4886,14 @@
- [ ] update_baidu_channel - [ ] update_baidu_channel
- [ ] update_campaign - [ ] update_campaign
- [ ] update_email_channel - [ ] update_email_channel
- [ ] update_email_template
- [ ] update_endpoint - [ ] update_endpoint
- [ ] update_endpoints_batch - [ ] update_endpoints_batch
- [ ] update_gcm_channel - [ ] update_gcm_channel
- [ ] update_push_template
- [ ] update_segment - [ ] update_segment
- [ ] update_sms_channel - [ ] update_sms_channel
- [ ] update_sms_template
- [ ] update_voice_channel - [ ] update_voice_channel
## pinpoint-email ## pinpoint-email
@ -4837,6 +4969,28 @@
- [ ] get_attribute_values - [ ] get_attribute_values
- [ ] get_products - [ ] get_products
## qldb
0% implemented
- [ ] create_ledger
- [ ] delete_ledger
- [ ] describe_journal_s3_export
- [ ] describe_ledger
- [ ] export_journal_to_s3
- [ ] get_block
- [ ] get_digest
- [ ] get_revision
- [ ] list_journal_s3_exports
- [ ] list_journal_s3_exports_for_ledger
- [ ] list_ledgers
- [ ] list_tags_for_resource
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_ledger
## qldb-session
0% implemented
- [ ] send_command
## quicksight ## quicksight
0% implemented 0% implemented
- [ ] create_group - [ ] create_group
@ -4868,6 +5022,7 @@
- [ ] get_resource_share_associations - [ ] get_resource_share_associations
- [ ] get_resource_share_invitations - [ ] get_resource_share_invitations
- [ ] get_resource_shares - [ ] get_resource_shares
- [ ] list_pending_invitation_resources
- [ ] list_principals - [ ] list_principals
- [ ] list_resources - [ ] list_resources
- [ ] reject_resource_share_invitation - [ ] reject_resource_share_invitation
@ -4889,6 +5044,7 @@
- [ ] copy_db_parameter_group - [ ] copy_db_parameter_group
- [ ] copy_db_snapshot - [ ] copy_db_snapshot
- [ ] copy_option_group - [ ] copy_option_group
- [ ] create_custom_availability_zone
- [ ] create_db_cluster - [ ] create_db_cluster
- [ ] create_db_cluster_endpoint - [ ] create_db_cluster_endpoint
- [ ] create_db_cluster_parameter_group - [ ] create_db_cluster_parameter_group
@ -4902,6 +5058,7 @@
- [ ] create_event_subscription - [ ] create_event_subscription
- [ ] create_global_cluster - [ ] create_global_cluster
- [ ] create_option_group - [ ] create_option_group
- [ ] delete_custom_availability_zone
- [ ] delete_db_cluster - [ ] delete_db_cluster
- [ ] delete_db_cluster_endpoint - [ ] delete_db_cluster_endpoint
- [ ] delete_db_cluster_parameter_group - [ ] delete_db_cluster_parameter_group
@ -4914,9 +5071,11 @@
- [ ] delete_db_subnet_group - [ ] delete_db_subnet_group
- [ ] delete_event_subscription - [ ] delete_event_subscription
- [ ] delete_global_cluster - [ ] delete_global_cluster
- [ ] delete_installation_media
- [ ] delete_option_group - [ ] delete_option_group
- [ ] describe_account_attributes - [ ] describe_account_attributes
- [ ] describe_certificates - [ ] describe_certificates
- [ ] describe_custom_availability_zones
- [ ] describe_db_cluster_backtracks - [ ] describe_db_cluster_backtracks
- [ ] describe_db_cluster_endpoints - [ ] describe_db_cluster_endpoints
- [ ] describe_db_cluster_parameter_groups - [ ] describe_db_cluster_parameter_groups
@ -4940,6 +5099,7 @@
- [ ] describe_event_subscriptions - [ ] describe_event_subscriptions
- [ ] describe_events - [ ] describe_events
- [ ] describe_global_clusters - [ ] describe_global_clusters
- [ ] describe_installation_media
- [ ] describe_option_group_options - [ ] describe_option_group_options
- [ ] describe_option_groups - [ ] describe_option_groups
- [ ] describe_orderable_db_instance_options - [ ] describe_orderable_db_instance_options
@ -4950,6 +5110,7 @@
- [ ] describe_valid_db_instance_modifications - [ ] describe_valid_db_instance_modifications
- [ ] download_db_log_file_portion - [ ] download_db_log_file_portion
- [ ] failover_db_cluster - [ ] failover_db_cluster
- [ ] import_installation_media
- [ ] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] modify_current_db_cluster_capacity - [ ] modify_current_db_cluster_capacity
- [ ] modify_db_cluster - [ ] modify_db_cluster
@ -4999,7 +5160,7 @@
- [ ] rollback_transaction - [ ] rollback_transaction
## redshift ## redshift
32% implemented 31% implemented
- [ ] accept_reserved_node_exchange - [ ] accept_reserved_node_exchange
- [ ] authorize_cluster_security_group_ingress - [ ] authorize_cluster_security_group_ingress
- [ ] authorize_snapshot_access - [ ] authorize_snapshot_access
@ -5046,6 +5207,7 @@
- [ ] describe_hsm_client_certificates - [ ] describe_hsm_client_certificates
- [ ] describe_hsm_configurations - [ ] describe_hsm_configurations
- [ ] describe_logging_status - [ ] describe_logging_status
- [ ] describe_node_configuration_options
- [ ] describe_orderable_cluster_options - [ ] describe_orderable_cluster_options
- [ ] describe_reserved_node_offerings - [ ] describe_reserved_node_offerings
- [ ] describe_reserved_nodes - [ ] describe_reserved_nodes
@ -5858,6 +6020,7 @@
- [ ] get_job_manifest - [ ] get_job_manifest
- [ ] get_job_unlock_code - [ ] get_job_unlock_code
- [ ] get_snowball_usage - [ ] get_snowball_usage
- [ ] get_software_updates
- [ ] list_cluster_jobs - [ ] list_cluster_jobs
- [ ] list_clusters - [ ] list_clusters
- [ ] list_compatible_images - [ ] list_compatible_images
@ -5866,7 +6029,7 @@
- [ ] update_job - [ ] update_job
## sns ## sns
58% implemented 57% implemented
- [ ] add_permission - [ ] add_permission
- [ ] check_if_phone_number_is_opted_out - [ ] check_if_phone_number_is_opted_out
- [ ] confirm_subscription - [ ] confirm_subscription
@ -5886,7 +6049,7 @@
- [X] list_platform_applications - [X] list_platform_applications
- [X] list_subscriptions - [X] list_subscriptions
- [ ] list_subscriptions_by_topic - [ ] list_subscriptions_by_topic
- [x] list_tags_for_resource - [X] list_tags_for_resource
- [X] list_topics - [X] list_topics
- [ ] opt_in_phone_number - [ ] opt_in_phone_number
- [X] publish - [X] publish
@ -5897,12 +6060,12 @@
- [X] set_subscription_attributes - [X] set_subscription_attributes
- [ ] set_topic_attributes - [ ] set_topic_attributes
- [X] subscribe - [X] subscribe
- [x] tag_resource - [X] tag_resource
- [X] unsubscribe - [X] unsubscribe
- [x] untag_resource - [X] untag_resource
## sqs ## sqs
75% implemented 65% implemented
- [X] add_permission - [X] add_permission
- [X] change_message_visibility - [X] change_message_visibility
- [ ] change_message_visibility_batch - [ ] change_message_visibility_batch
@ -5913,13 +6076,13 @@
- [ ] get_queue_attributes - [ ] get_queue_attributes
- [ ] get_queue_url - [ ] get_queue_url
- [X] list_dead_letter_source_queues - [X] list_dead_letter_source_queues
- [x] list_queue_tags - [ ] list_queue_tags
- [X] list_queues - [X] list_queues
- [X] purge_queue - [X] purge_queue
- [ ] receive_message - [ ] receive_message
- [X] remove_permission - [X] remove_permission
- [X] send_message - [X] send_message
- [x] send_message_batch - [ ] send_message_batch
- [X] set_queue_attributes - [X] set_queue_attributes
- [X] tag_queue - [X] tag_queue
- [X] untag_queue - [X] untag_queue
@ -5976,7 +6139,7 @@
- [ ] describe_maintenance_windows - [ ] describe_maintenance_windows
- [ ] describe_maintenance_windows_for_target - [ ] describe_maintenance_windows_for_target
- [ ] describe_ops_items - [ ] describe_ops_items
- [ ] describe_parameters - [X] describe_parameters
- [ ] describe_patch_baselines - [ ] describe_patch_baselines
- [ ] describe_patch_group_state - [ ] describe_patch_group_state
- [ ] describe_patch_groups - [ ] describe_patch_groups
@ -6048,7 +6211,7 @@
- [ ] update_service_setting - [ ] update_service_setting
## stepfunctions ## stepfunctions
0% implemented 36% implemented
- [ ] create_activity - [ ] create_activity
- [X] create_state_machine - [X] create_state_machine
- [ ] delete_activity - [ ] delete_activity
@ -6056,13 +6219,13 @@
- [ ] describe_activity - [ ] describe_activity
- [X] describe_execution - [X] describe_execution
- [X] describe_state_machine - [X] describe_state_machine
- [x] describe_state_machine_for_execution - [ ] describe_state_machine_for_execution
- [ ] get_activity_task - [ ] get_activity_task
- [ ] get_execution_history - [ ] get_execution_history
- [ ] list_activities - [ ] list_activities
- [X] list_executions - [X] list_executions
- [X] list_state_machines - [X] list_state_machines
- [X] list_tags_for_resource - [ ] list_tags_for_resource
- [ ] send_task_failure - [ ] send_task_failure
- [ ] send_task_heartbeat - [ ] send_task_heartbeat
- [ ] send_task_success - [ ] send_task_success
@ -6541,6 +6704,10 @@
- [ ] update_primary_email_address - [ ] update_primary_email_address
- [ ] update_resource - [ ] update_resource
## workmailmessageflow
0% implemented
- [ ] get_raw_message_content
## workspaces ## workspaces
0% implemented 0% implemented
- [ ] associate_ip_groups - [ ] associate_ip_groups
@ -6560,6 +6727,7 @@
- [ ] describe_workspace_bundles - [ ] describe_workspace_bundles
- [ ] describe_workspace_directories - [ ] describe_workspace_directories
- [ ] describe_workspace_images - [ ] describe_workspace_images
- [ ] describe_workspace_snapshots
- [ ] describe_workspaces - [ ] describe_workspaces
- [ ] describe_workspaces_connection_status - [ ] describe_workspaces_connection_status
- [ ] disassociate_ip_groups - [ ] disassociate_ip_groups
@ -6571,6 +6739,7 @@
- [ ] modify_workspace_state - [ ] modify_workspace_state
- [ ] reboot_workspaces - [ ] reboot_workspaces
- [ ] rebuild_workspaces - [ ] rebuild_workspaces
- [ ] restore_workspace
- [ ] revoke_ip_rules - [ ] revoke_ip_rules
- [ ] start_workspaces - [ ] start_workspaces
- [ ] stop_workspaces - [ ] stop_workspaces

View File

@ -7,6 +7,7 @@ __version__ = '1.3.14.dev'
from .acm import mock_acm # flake8: noqa from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
from .athena import mock_athena # flake8: noqa
from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # flake8: noqa from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # flake8: noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa

7
moto/athena/__init__.py Normal file
View File

@ -0,0 +1,7 @@
from __future__ import unicode_literals
from .models import athena_backends
from ..core.models import base_decorator, deprecated_base_decorator
# Default backend instance; us-east-1 is moto's conventional fallback region.
athena_backend = athena_backends['us-east-1']
# Decorators that patch boto clients to hit the in-memory Athena backends.
mock_athena = base_decorator(athena_backends)
mock_athena_deprecated = deprecated_base_decorator(athena_backends)

18
moto/athena/exceptions.py Normal file
View File

@ -0,0 +1,18 @@
from __future__ import unicode_literals
import json
from werkzeug.exceptions import BadRequest
class AthenaClientError(BadRequest):
    """HTTP 400 error whose body follows the AWS JSON error envelope.

    ``code`` and ``message`` are placed under ``Error`` so botocore parses
    them back into a ``ClientError`` on the caller's side.
    """

    def __init__(self, code, message):
        super(AthenaClientError, self).__init__()
        payload = {
            "Error": {
                "Code": code,
                "Message": message,
                "Type": "InvalidRequestException",
            },
            # Fixed request id; tests only care that the field is present.
            "RequestId": "6876f774-7273-11e4-85dc-39e55ca848d1",
        }
        self.description = json.dumps(payload)

79
moto/athena/models.py Normal file
View File

@ -0,0 +1,79 @@
from __future__ import unicode_literals
import time
import boto3
from moto.core import BaseBackend, BaseModel
# Hard-coded account id used in every generated ARN.
ACCOUNT_ID = 123456789012


class TaggableResourceMixin(object):
    """Mixin providing AWS-style tag bookkeeping and an Athena ARN.

    This was lifted from the Redshift backend when Athena support was first
    written; it is still undecided whether the duplication is worth keeping.
    """

    def __init__(self, region_name, resource_name, tags):
        self.region = region_name
        self.resource_name = resource_name
        # Normalise a missing/None tag list to an empty one.
        self.tags = tags if tags else []

    @property
    def arn(self):
        """ARN of the form ``arn:aws:athena:<region>:<account>:<resource>``."""
        return "arn:aws:athena:{region}:{account_id}:{resource_name}".format(
            region=self.region,
            account_id=ACCOUNT_ID,
            resource_name=self.resource_name)

    def create_tags(self, tags):
        """Upsert *tags*: a new Key replaces any existing tag with that Key."""
        incoming_keys = set(tag_set['Key'] for tag_set in tags)
        kept = [tag_set for tag_set in self.tags
                if tag_set['Key'] not in incoming_keys]
        self.tags = kept + list(tags)
        return self.tags

    def delete_tags(self, tag_keys):
        """Drop every tag whose Key is in *tag_keys*; return the remainder."""
        doomed = set(tag_keys)
        self.tags = [tag_set for tag_set in self.tags
                     if tag_set['Key'] not in doomed]
        return self.tags
class WorkGroup(TaggableResourceMixin, BaseModel):
    """A single Athena work group belonging to one regional backend."""

    resource_type = 'workgroup'
    # Work groups are created enabled; state transitions are not modelled yet.
    state = 'ENABLED'

    def __init__(self, athena_backend, name, configuration, description, tags):
        self.region_name = athena_backend.region_name
        resource_name = "workgroup/{}".format(name)
        super(WorkGroup, self).__init__(self.region_name, resource_name, tags)
        # Keep a back-reference to the owning backend plus the raw request
        # fields; nothing in the configuration dict is interpreted here.
        self.athena_backend = athena_backend
        self.name = name
        self.configuration = configuration
        self.description = description
class AthenaBackend(BaseBackend):
    """In-memory Athena state for a single region."""

    region_name = None

    def __init__(self, region_name=None):
        if region_name is not None:
            self.region_name = region_name
        # Maps work group name -> WorkGroup instance.
        self.work_groups = {}

    def create_work_group(self, name, configuration, description, tags):
        """Create and register a work group.

        Returns the new ``WorkGroup``, or ``None`` when *name* is taken so
        the response layer can emit an already-exists error.
        """
        if name in self.work_groups:
            return None
        wg = WorkGroup(self, name, configuration, description, tags)
        self.work_groups[name] = wg
        return wg

    def list_work_groups(self):
        """Return summary dicts for every work group in this region."""
        summaries = []
        for wg in self.work_groups.values():
            summaries.append({
                'Name': wg.name,
                'State': wg.state,
                'Description': wg.description,
                # NOTE(review): creation time is not stored on the work
                # group, so this reports the listing time instead.
                'CreationTime': time.time(),
            })
        return summaries
# One AthenaBackend per region in which the real service is available.
athena_backends = {
    region: AthenaBackend(region)
    for region in boto3.Session().get_available_regions('athena')
}

35
moto/athena/responses.py Normal file
View File

@ -0,0 +1,35 @@
import json
from moto.core.responses import BaseResponse
from .models import athena_backends
class AthenaResponse(BaseResponse):
    """Translates Athena JSON-protocol requests onto the regional backend."""

    @property
    def athena_backend(self):
        # Pick the backend matching the region the request was signed for.
        return athena_backends[self.region]

    def create_work_group(self):
        name = self._get_param('Name')
        description = self._get_param('Description')
        configuration = self._get_param('Configuration')
        tags = self._get_param('Tags')
        work_group = self.athena_backend.create_work_group(
            name, configuration, description, tags)
        if work_group is None:
            # Duplicate name: mirror the error shape of the real API.
            error_body = json.dumps({
                '__type': 'InvalidRequestException',
                'Message': 'WorkGroup already exists',
            })
            return error_body, dict(status=400)
        return json.dumps({
            "CreateWorkGroupResponse": {
                "ResponseMetadata": {
                    "RequestId": "384ac68d-3775-11df-8963-01868b7c937a",
                }
            }
        })

    def list_work_groups(self):
        summaries = self.athena_backend.list_work_groups()
        return json.dumps({"WorkGroups": summaries})

10
moto/athena/urls.py Normal file
View File

@ -0,0 +1,10 @@
from __future__ import unicode_literals
from .responses import AthenaResponse
# Hostnames this mock intercepts, e.g. https://athena.us-east-1.amazonaws.com
url_bases = [
    "https?://athena.(.+).amazonaws.com",
]

# Athena is a JSON-protocol service: every action POSTs to the root path,
# so a single dispatch entry routes everything to AthenaResponse.
url_paths = {
    '{0}/$': AthenaResponse.dispatch,
}

1
moto/athena/utils.py Normal file
View File

@ -0,0 +1 @@
from __future__ import unicode_literals

View File

@ -645,8 +645,10 @@ class LambdaStorage(object):
self._arns[fn.function_arn] = fn self._arns[fn.function_arn] = fn
return fn return fn
def del_function(self, name, qualifier=None): def del_function(self, name_or_arn, qualifier=None):
if name in self._functions: function = self.get_function_by_name_or_arn(name_or_arn)
if function:
name = function.function_name
if not qualifier: if not qualifier:
# Something is still reffing this so delete all arns # Something is still reffing this so delete all arns
latest = self._functions[name]['latest'].function_arn latest = self._functions[name]['latest'].function_arn

View File

@ -117,6 +117,7 @@ class LambdaResponse(BaseResponse):
raise ValueError("Cannot handle {0} request".format(request.method)) raise ValueError("Cannot handle {0} request".format(request.method))
def policy(self, request, full_url, headers): def policy(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET': if request.method == 'GET':
return self._get_policy(request, full_url, headers) return self._get_policy(request, full_url, headers)
if request.method == 'POST': if request.method == 'POST':
@ -140,7 +141,7 @@ class LambdaResponse(BaseResponse):
path = request.path if hasattr(request, 'path') else path_url(request.url) path = request.path if hasattr(request, 'path') else path_url(request.url)
function_name = path.split('/')[-2] function_name = path.split('/')[-2]
if self.lambda_backend.get_function(function_name): if self.lambda_backend.get_function(function_name):
policy = request.body.decode('utf8') policy = self.body
self.lambda_backend.add_policy(function_name, policy) self.lambda_backend.add_policy(function_name, policy)
return 200, {}, json.dumps(dict(Statement=policy)) return 200, {}, json.dumps(dict(Statement=policy))
else: else:
@ -266,7 +267,7 @@ class LambdaResponse(BaseResponse):
return 404, {}, "{}" return 404, {}, "{}"
def _delete_function(self, request, full_url, headers): def _delete_function(self, request, full_url, headers):
function_name = self.path.rsplit('/', 1)[-1] function_name = unquote(self.path.rsplit('/', 1)[-1])
qualifier = self._get_param('Qualifier', None) qualifier = self._get_param('Qualifier', None)
if self.lambda_backend.delete_function(function_name, qualifier): if self.lambda_backend.delete_function(function_name, qualifier):

View File

@ -9,7 +9,7 @@ response = LambdaResponse()
url_paths = { url_paths = {
'{0}/(?P<api_version>[^/]+)/functions/?$': response.root, '{0}/(?P<api_version>[^/]+)/functions/?$': response.root,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/?$': response.function, r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_:%-]+)/?$': response.function,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/versions/?$': response.versions, r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/versions/?$': response.versions,
r'{0}/(?P<api_version>[^/]+)/event-source-mappings/?$': response.event_source_mappings, r'{0}/(?P<api_version>[^/]+)/event-source-mappings/?$': response.event_source_mappings,
r'{0}/(?P<api_version>[^/]+)/event-source-mappings/(?P<UUID>[\w_-]+)/?$': response.event_source_mapping, r'{0}/(?P<api_version>[^/]+)/event-source-mappings/(?P<UUID>[\w_-]+)/?$': response.event_source_mapping,

View File

@ -2,6 +2,7 @@ from __future__ import unicode_literals
from moto.acm import acm_backends from moto.acm import acm_backends
from moto.apigateway import apigateway_backends from moto.apigateway import apigateway_backends
from moto.athena import athena_backends
from moto.autoscaling import autoscaling_backends from moto.autoscaling import autoscaling_backends
from moto.awslambda import lambda_backends from moto.awslambda import lambda_backends
from moto.cloudformation import cloudformation_backends from moto.cloudformation import cloudformation_backends
@ -35,8 +36,8 @@ from moto.redshift import redshift_backends
from moto.resourcegroups import resourcegroups_backends from moto.resourcegroups import resourcegroups_backends
from moto.route53 import route53_backends from moto.route53 import route53_backends
from moto.s3 import s3_backends from moto.s3 import s3_backends
from moto.ses import ses_backends
from moto.secretsmanager import secretsmanager_backends from moto.secretsmanager import secretsmanager_backends
from moto.ses import ses_backends
from moto.sns import sns_backends from moto.sns import sns_backends
from moto.sqs import sqs_backends from moto.sqs import sqs_backends
from moto.ssm import ssm_backends from moto.ssm import ssm_backends
@ -53,6 +54,7 @@ from moto.config import config_backends
BACKENDS = { BACKENDS = {
'acm': acm_backends, 'acm': acm_backends,
'apigateway': apigateway_backends, 'apigateway': apigateway_backends,
'athena': athena_backends,
'autoscaling': autoscaling_backends, 'autoscaling': autoscaling_backends,
'batch': batch_backends, 'batch': batch_backends,
'cloudformation': cloudformation_backends, 'cloudformation': cloudformation_backends,

View File

@ -34,14 +34,76 @@ def bytesize(val):
return len(str(val).encode('utf-8')) return len(str(val).encode('utf-8'))
def attribute_is_list(attr):
"""
Checks if attribute denotes a list, and returns the regular expression if so
:param attr: attr or attr[index]
:return: attr, re or None
"""
list_index_update = re.match('(.+)\\[([0-9]+)\\]', attr)
if list_index_update:
attr = list_index_update.group(1)
return attr, list_index_update.group(2) if list_index_update else None
class DynamoType(object): class DynamoType(object):
""" """
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
""" """
def __init__(self, type_as_dict): def __init__(self, type_as_dict):
if type(type_as_dict) == DynamoType:
self.type = type_as_dict.type
self.value = type_as_dict.value
else:
self.type = list(type_as_dict)[0] self.type = list(type_as_dict)[0]
self.value = list(type_as_dict.values())[0] self.value = list(type_as_dict.values())[0]
if self.is_list():
self.value = [DynamoType(val) for val in self.value]
elif self.is_map():
self.value = dict((k, DynamoType(v)) for k, v in self.value.items())
def set(self, key, new_value, index=None):
if index:
index = int(index)
if type(self.value) is not list:
raise InvalidUpdateExpression
if index >= len(self.value):
self.value.append(new_value)
# {'L': [DynamoType, ..]} ==> DynamoType.set()
self.value[min(index, len(self.value) - 1)].set(key, new_value)
else:
attr = (key or '').split('.').pop(0)
attr, list_index = attribute_is_list(attr)
if not key:
# {'S': value} ==> {'S': new_value}
self.value = new_value.value
else:
if attr not in self.value: # nonexistingattribute
type_of_new_attr = 'M' if '.' in key else new_value.type
self.value[attr] = DynamoType({type_of_new_attr: {}})
# {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value)
self.value[attr].set('.'.join(key.split('.')[1:]), new_value, list_index)
def delete(self, key, index=None):
if index:
if not key:
if int(index) < len(self.value):
del self.value[int(index)]
elif '.' in key:
self.value[int(index)].delete('.'.join(key.split('.')[1:]))
else:
self.value[int(index)].delete(key)
else:
attr = key.split('.')[0]
attr, list_index = attribute_is_list(attr)
if list_index:
self.value[attr].delete('.'.join(key.split('.')[1:]), list_index)
elif '.' in key:
self.value[attr].delete('.'.join(key.split('.')[1:]))
else:
self.value.pop(key)
def __hash__(self): def __hash__(self):
return hash((self.type, self.value)) return hash((self.type, self.value))
@ -98,7 +160,7 @@ class DynamoType(object):
if isinstance(key, int) and self.is_list(): if isinstance(key, int) and self.is_list():
idx = key idx = key
if idx >= 0 and idx < len(self.value): if 0 <= idx < len(self.value):
return DynamoType(self.value[idx]) return DynamoType(self.value[idx])
return None return None
@ -110,7 +172,7 @@ class DynamoType(object):
sub_type = self.type[0] sub_type = self.type[0]
value_size = sum([DynamoType({sub_type: v}).size() for v in self.value]) value_size = sum([DynamoType({sub_type: v}).size() for v in self.value])
elif self.is_list(): elif self.is_list():
value_size = sum([DynamoType(v).size() for v in self.value]) value_size = sum([v.size() for v in self.value])
elif self.is_map(): elif self.is_map():
value_size = sum([bytesize(k) + DynamoType(v).size() for k, v in self.value.items()]) value_size = sum([bytesize(k) + DynamoType(v).size() for k, v in self.value.items()])
elif type(self.value) == bool: elif type(self.value) == bool:
@ -162,22 +224,6 @@ class LimitedSizeDict(dict):
raise ItemSizeTooLarge raise ItemSizeTooLarge
super(LimitedSizeDict, self).__setitem__(key, value) super(LimitedSizeDict, self).__setitem__(key, value)
def update(self, *args, **kwargs):
if args:
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, "
"got %d" % len(args))
other = dict(args[0])
for key in other:
self[key] = other[key]
for key in kwargs:
self[key] = kwargs[key]
def setdefault(self, key, value=None):
if key not in self:
self[key] = value
return self[key]
class Item(BaseModel): class Item(BaseModel):
@ -236,72 +282,26 @@ class Item(BaseModel):
if action == "REMOVE": if action == "REMOVE":
key = value key = value
attr, list_index = attribute_is_list(key.split('.')[0])
if '.' not in key: if '.' not in key:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key) if list_index:
if list_index_update: new_list = DynamoType(self.attrs[attr])
# We need to remove an item from a list (REMOVE listattr[0]) new_list.delete(None, list_index)
key_attr = self.attrs[list_index_update.group(1)] self.attrs[attr] = new_list
list_index = int(list_index_update.group(2))
if key_attr.is_list():
if len(key_attr.value) > list_index:
del key_attr.value[list_index]
else: else:
self.attrs.pop(value, None) self.attrs.pop(value, None)
else: else:
# Handle nested dict updates # Handle nested dict updates
key_parts = key.split('.') self.attrs[attr].delete('.'.join(key.split('.')[1:]))
attr = key_parts.pop(0)
if attr not in self.attrs:
raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts[:-1]:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_part)
if list_index_update:
key_part = list_index_update.group(1) # listattr[1] ==> listattr
# Hack but it'll do, traverses into a dict
last_val_type = list(last_val.keys())
if last_val_type and last_val_type[0] == 'M':
last_val = last_val['M']
if key_part not in last_val:
last_val[key_part] = {'M': {}}
last_val = last_val[key_part]
if list_index_update:
last_val = last_val['L'][int(list_index_update.group(2))]
last_val_type = list(last_val.keys())
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_parts[-1])
if list_index_update:
# We need to remove an item from a list (REMOVE attr.listattr[0])
key_part = list_index_update.group(1) # listattr[1] ==> listattr
list_to_update = last_val[key_part]['L']
index_to_remove = int(list_index_update.group(2))
if index_to_remove < len(list_to_update):
del list_to_update[index_to_remove]
else:
if last_val_type and last_val_type[0] == 'M':
last_val['M'].pop(key_parts[-1], None)
else:
last_val.pop(key_parts[-1], None)
elif action == 'SET': elif action == 'SET':
key, value = value.split("=", 1) key, value = value.split("=", 1)
key = key.strip() key = key.strip()
value = value.strip() value = value.strip()
# If not exists, changes value to a default if needed, else its the same as it was # check whether key is a list
if value.startswith('if_not_exists'): attr, list_index = attribute_is_list(key.split('.')[0])
# Function signature # If value not exists, changes value to a default if needed, else its the same as it was
match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value) value = self._get_default(value)
if not match:
raise TypeError
path, value = match.groups()
# If it already exists, get its value so we dont overwrite it
if path in self.attrs:
value = self.attrs[path]
if type(value) != DynamoType: if type(value) != DynamoType:
if value in expression_attribute_values: if value in expression_attribute_values:
@ -311,55 +311,12 @@ class Item(BaseModel):
else: else:
dyn_value = value dyn_value = value
if '.' not in key: if '.' in key and attr not in self.attrs:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key) raise ValueError # Setting nested attr not allowed if first attr does not exist yet
if list_index_update: elif attr not in self.attrs:
key_attr = self.attrs[list_index_update.group(1)] self.attrs[attr] = dyn_value # set new top-level attribute
list_index = int(list_index_update.group(2))
if key_attr.is_list():
if len(key_attr.value) > list_index:
key_attr.value[list_index] = expression_attribute_values[value]
else: else:
key_attr.value.append(expression_attribute_values[value]) self.attrs[attr].set('.'.join(key.split('.')[1:]), dyn_value, list_index) # set value recursively
else:
raise InvalidUpdateExpression
else:
self.attrs[key] = dyn_value
else:
# Handle nested dict updates
key_parts = key.split('.')
attr = key_parts.pop(0)
if attr not in self.attrs:
raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_part)
if list_index_update:
key_part = list_index_update.group(1) # listattr[1] ==> listattr
# Hack but it'll do, traverses into a dict
last_val_type = list(last_val.keys())
if last_val_type and last_val_type[0] == 'M':
last_val = last_val['M']
if key_part not in last_val:
last_val[key_part] = {'M': {}}
last_val = last_val[key_part]
current_type = list(last_val.keys())[0]
if list_index_update:
# We need to add an item to a list
list_index = int(list_index_update.group(2))
if len(last_val['L']) > list_index:
last_val['L'][list_index] = expression_attribute_values[value]
else:
last_val['L'].append(expression_attribute_values[value])
else:
# We have reference to a nested object but we cant just assign to it
if current_type == dyn_value.type:
last_val[current_type] = dyn_value.value
else:
last_val[dyn_value.type] = dyn_value.value
del last_val[current_type]
elif action == 'ADD': elif action == 'ADD':
key, value = value.split(" ", 1) key, value = value.split(" ", 1)
@ -413,6 +370,20 @@ class Item(BaseModel):
else: else:
raise NotImplementedError('{} update action not yet supported'.format(action)) raise NotImplementedError('{} update action not yet supported'.format(action))
def _get_default(self, value):
if value.startswith('if_not_exists'):
# Function signature
match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value)
if not match:
raise TypeError
path, value = match.groups()
# If it already exists, get its value so we dont overwrite it
if path in self.attrs:
value = self.attrs[path]
return value
def update_with_attribute_updates(self, attribute_updates): def update_with_attribute_updates(self, attribute_updates):
for attribute_name, update_action in attribute_updates.items(): for attribute_name, update_action in attribute_updates.items():
action = update_action['Action'] action = update_action['Action']
@ -810,7 +781,6 @@ class Table(BaseModel):
else: else:
possible_results = [item for item in list(self.all_items()) if isinstance( possible_results = [item for item in list(self.all_items()) if isinstance(
item, Item) and item.hash_key == hash_key] item, Item) and item.hash_key == hash_key]
if range_comparison: if range_comparison:
if index_name and not index_range_key: if index_name and not index_range_key:
raise ValueError( raise ValueError(

View File

@ -98,9 +98,9 @@ class TooManyTags(RESTError):
class EntityAlreadyExists(RESTError): class EntityAlreadyExists(RESTError):
code = 409 code = 409
def __init__(self): def __init__(self, message):
super(EntityAlreadyExists, self).__init__( super(EntityAlreadyExists, self).__init__(
'EntityAlreadyExists', "Unknown") 'EntityAlreadyExists', message)
class ValidationError(RESTError): class ValidationError(RESTError):

View File

@ -1,5 +1,8 @@
from __future__ import unicode_literals from __future__ import unicode_literals
import base64 import base64
import os
import random
import string
import sys import sys
from datetime import datetime from datetime import datetime
import json import json
@ -40,6 +43,23 @@ class MFADevice(object):
return iso_8601_datetime_without_milliseconds(self.enable_date) return iso_8601_datetime_without_milliseconds(self.enable_date)
class VirtualMfaDevice(object):
def __init__(self, device_name):
self.serial_number = 'arn:aws:iam::{0}:mfa{1}'.format(ACCOUNT_ID, device_name)
random_base32_string = ''.join(random.choice(string.ascii_uppercase + '234567') for _ in range(64))
self.base32_string_seed = base64.b64encode(random_base32_string.encode('ascii')).decode('ascii')
self.qr_code_png = base64.b64encode(os.urandom(64)) # this would be a generated PNG
self.enable_date = None
self.user_attribute = None
self.user = None
@property
def enabled_iso_8601(self):
return iso_8601_datetime_without_milliseconds(self.enable_date)
class Policy(BaseModel): class Policy(BaseModel):
is_attachable = False is_attachable = False
@ -596,6 +616,7 @@ class IAMBackend(BaseBackend):
self.open_id_providers = {} self.open_id_providers = {}
self.policy_arn_regex = re.compile( self.policy_arn_regex = re.compile(
r'^arn:aws:iam::[0-9]*:policy/.*$') r'^arn:aws:iam::[0-9]*:policy/.*$')
self.virtual_mfa_devices = {}
super(IAMBackend, self).__init__() super(IAMBackend, self).__init__()
def _init_managed_policies(self): def _init_managed_policies(self):
@ -742,11 +763,25 @@ class IAMBackend(BaseBackend):
raise IAMNotFoundException("Role {0} not found".format(arn)) raise IAMNotFoundException("Role {0} not found".format(arn))
def delete_role(self, role_name): def delete_role(self, role_name):
for role in self.get_roles(): role = self.get_role(role_name)
for instance_profile in self.get_instance_profiles():
for role in instance_profile.roles:
if role.name == role_name: if role.name == role_name:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must remove roles from instance profile first."
)
if role.managed_policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must detach all policies first."
)
if role.policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must delete policies first."
)
del self.roles[role.id] del self.roles[role.id]
return
raise IAMNotFoundException("Role {0} not found".format(role_name))
def get_roles(self): def get_roles(self):
return self.roles.values() return self.roles.values()
@ -1230,6 +1265,21 @@ class IAMBackend(BaseBackend):
"Device {0} already exists".format(serial_number) "Device {0} already exists".format(serial_number)
) )
device = self.virtual_mfa_devices.get(serial_number, None)
if device:
device.enable_date = datetime.utcnow()
device.user = user
device.user_attribute = {
'Path': user.path,
'UserName': user.name,
'UserId': user.id,
'Arn': user.arn,
'CreateDate': user.created_iso_8601,
'PasswordLastUsed': None, # not supported
'PermissionsBoundary': {}, # ToDo: add put_user_permissions_boundary() functionality
'Tags': {} # ToDo: add tag_user() functionality
}
user.enable_mfa_device( user.enable_mfa_device(
serial_number, serial_number,
authentication_code_1, authentication_code_1,
@ -1244,17 +1294,87 @@ class IAMBackend(BaseBackend):
"Device {0} not found".format(serial_number) "Device {0} not found".format(serial_number)
) )
device = self.virtual_mfa_devices.get(serial_number, None)
if device:
device.enable_date = None
device.user = None
device.user_attribute = None
user.deactivate_mfa_device(serial_number) user.deactivate_mfa_device(serial_number)
def list_mfa_devices(self, user_name): def list_mfa_devices(self, user_name):
user = self.get_user(user_name) user = self.get_user(user_name)
return user.mfa_devices.values() return user.mfa_devices.values()
def create_virtual_mfa_device(self, device_name, path):
if not path:
path = '/'
if not path.startswith('/') and not path.endswith('/'):
raise ValidationError('The specified value for path is invalid. '
'It must begin and end with / and contain only alphanumeric characters and/or / characters.')
if any(not len(part) for part in path.split('/')[1:-1]):
raise ValidationError('The specified value for path is invalid. '
'It must begin and end with / and contain only alphanumeric characters and/or / characters.')
if len(path) > 512:
raise ValidationError('1 validation error detected: '
'Value "{}" at "path" failed to satisfy constraint: '
'Member must have length less than or equal to 512')
device = VirtualMfaDevice(path + device_name)
if device.serial_number in self.virtual_mfa_devices:
raise EntityAlreadyExists('MFADevice entity at the same path and name already exists.')
self.virtual_mfa_devices[device.serial_number] = device
return device
def delete_virtual_mfa_device(self, serial_number):
device = self.virtual_mfa_devices.pop(serial_number, None)
if not device:
raise IAMNotFoundException('VirtualMFADevice with serial number {0} doesn\'t exist.'.format(serial_number))
def list_virtual_mfa_devices(self, assignment_status, marker, max_items):
devices = list(self.virtual_mfa_devices.values())
if assignment_status == 'Assigned':
devices = [device for device in devices if device.enable_date]
if assignment_status == 'Unassigned':
devices = [device for device in devices if not device.enable_date]
sorted(devices, key=lambda device: device.serial_number)
max_items = int(max_items)
start_idx = int(marker) if marker else 0
if start_idx > len(devices):
raise ValidationError('Invalid Marker.')
devices = devices[start_idx:start_idx + max_items]
if len(devices) < max_items:
marker = None
else:
marker = str(start_idx + max_items)
return devices, marker
def delete_user(self, user_name): def delete_user(self, user_name):
try: user = self.get_user(user_name)
if user.managed_policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must detach all policies first."
)
if user.policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must delete policies first."
)
del self.users[user_name] del self.users[user_name]
except KeyError:
raise IAMNotFoundException("User {0} not found".format(user_name))
def report_generated(self): def report_generated(self):
return self.credential_report return self.credential_report
@ -1347,7 +1467,7 @@ class IAMBackend(BaseBackend):
open_id_provider = OpenIDConnectProvider(url, thumbprint_list, client_id_list) open_id_provider = OpenIDConnectProvider(url, thumbprint_list, client_id_list)
if open_id_provider.arn in self.open_id_providers: if open_id_provider.arn in self.open_id_providers:
raise EntityAlreadyExists raise EntityAlreadyExists('Unknown')
self.open_id_providers[open_id_provider.arn] = open_id_provider self.open_id_providers[open_id_provider.arn] = open_id_provider
return open_id_provider return open_id_provider

View File

@ -598,6 +598,33 @@ class IamResponse(BaseResponse):
template = self.response_template(LIST_MFA_DEVICES_TEMPLATE) template = self.response_template(LIST_MFA_DEVICES_TEMPLATE)
return template.render(user_name=user_name, devices=devices) return template.render(user_name=user_name, devices=devices)
def create_virtual_mfa_device(self):
    """Handle the CreateVirtualMFADevice API action."""
    device_name = self._get_param('VirtualMFADeviceName')
    device_path = self._get_param('Path')
    device = iam_backend.create_virtual_mfa_device(device_name, device_path)
    return self.response_template(CREATE_VIRTUAL_MFA_DEVICE_TEMPLATE).render(device=device)
def delete_virtual_mfa_device(self):
    """Handle the DeleteVirtualMFADevice API action."""
    iam_backend.delete_virtual_mfa_device(self._get_param('SerialNumber'))
    return self.response_template(DELETE_VIRTUAL_MFA_DEVICE_TEMPLATE).render()
def list_virtual_mfa_devices(self):
    """Handle the ListVirtualMFADevices API action (with pagination)."""
    status_filter = self._get_param('AssignmentStatus', 'Any')
    page_marker = self._get_param('Marker')
    page_size = self._get_param('MaxItems', 100)
    devices, next_marker = iam_backend.list_virtual_mfa_devices(
        status_filter, page_marker, page_size
    )
    template = self.response_template(LIST_VIRTUAL_MFA_DEVICES_TEMPLATE)
    return template.render(devices=devices, marker=next_marker)
def delete_user(self): def delete_user(self):
user_name = self._get_param('UserName') user_name = self._get_param('UserName')
iam_backend.delete_user(user_name) iam_backend.delete_user(user_name)
@ -1600,6 +1627,7 @@ CREDENTIAL_REPORT_GENERATING = """
</ResponseMetadata> </ResponseMetadata>
</GenerateCredentialReportResponse>""" </GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse> CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
<GenerateCredentialReportResult> <GenerateCredentialReportResult>
<State>COMPLETE</State> <State>COMPLETE</State>
@ -1609,6 +1637,7 @@ CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
</ResponseMetadata> </ResponseMetadata>
</GenerateCredentialReportResponse>""" </GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT = """<GetCredentialReportResponse> CREDENTIAL_REPORT = """<GetCredentialReportResponse>
<GetCredentialReportResult> <GetCredentialReportResult>
<Content>{{ report }}</Content> <Content>{{ report }}</Content>
@ -1620,6 +1649,7 @@ CREDENTIAL_REPORT = """<GetCredentialReportResponse>
</ResponseMetadata> </ResponseMetadata>
</GetCredentialReportResponse>""" </GetCredentialReportResponse>"""
LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleResponse> LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleResponse>
<ListInstanceProfilesForRoleResult> <ListInstanceProfilesForRoleResult>
<IsTruncated>false</IsTruncated> <IsTruncated>false</IsTruncated>
@ -1652,6 +1682,7 @@ LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleRespon
</ResponseMetadata> </ResponseMetadata>
</ListInstanceProfilesForRoleResponse>""" </ListInstanceProfilesForRoleResponse>"""
LIST_MFA_DEVICES_TEMPLATE = """<ListMFADevicesResponse> LIST_MFA_DEVICES_TEMPLATE = """<ListMFADevicesResponse>
<ListMFADevicesResult> <ListMFADevicesResult>
<MFADevices> <MFADevices>
@ -1670,6 +1701,61 @@ LIST_MFA_DEVICES_TEMPLATE = """<ListMFADevicesResponse>
</ListMFADevicesResponse>""" </ListMFADevicesResponse>"""
# Jinja template for the CreateVirtualMFADevice response; rendered with
# `device` (a VirtualMfaDevice).
CREATE_VIRTUAL_MFA_DEVICE_TEMPLATE = """<CreateVirtualMFADeviceResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateVirtualMFADeviceResult>
<VirtualMFADevice>
<SerialNumber>{{ device.serial_number }}</SerialNumber>
<Base32StringSeed>{{ device.base32_string_seed }}</Base32StringSeed>
<QRCodePNG>{{ device.qr_code_png }}</QRCodePNG>
</VirtualMFADevice>
</CreateVirtualMFADeviceResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateVirtualMFADeviceResponse>"""
# Jinja template for the DeleteVirtualMFADevice response (no result body).
DELETE_VIRTUAL_MFA_DEVICE_TEMPLATE = """<DeleteVirtualMFADeviceResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DeleteVirtualMFADeviceResponse>"""
# Jinja template for the ListVirtualMFADevices response; rendered with
# `devices` (the page of devices) and `marker` (None on the last page).
LIST_VIRTUAL_MFA_DEVICES_TEMPLATE = """<ListVirtualMFADevicesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListVirtualMFADevicesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<VirtualMFADevices>
{% for device in devices %}
<member>
<SerialNumber>{{ device.serial_number }}</SerialNumber>
{% if device.enable_date %}
<EnableDate>{{ device.enabled_iso_8601 }}</EnableDate>
{% endif %}
{% if device.user %}
<User>
<Path>{{ device.user.path }}</Path>
<UserName>{{ device.user.name }}</UserName>
<UserId>{{ device.user.id }}</UserId>
<CreateDate>{{ device.user.created_iso_8601 }}</CreateDate>
<Arn>{{ device.user.arn }}</Arn>
</User>
{% endif %}
</member>
{% endfor %}
</VirtualMFADevices>
</ListVirtualMFADevicesResult>
<ResponseMetadata>
<RequestId>b61ce1b1-0401-11e1-b2f8-2dEXAMPLEbfc</RequestId>
</ResponseMetadata>
</ListVirtualMFADevicesResponse>"""
LIST_ACCOUNT_ALIASES_TEMPLATE = """<ListAccountAliasesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/"> LIST_ACCOUNT_ALIASES_TEMPLATE = """<ListAccountAliasesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListAccountAliasesResult> <ListAccountAliasesResult>
<IsTruncated>false</IsTruncated> <IsTruncated>false</IsTruncated>

View File

@ -946,7 +946,7 @@ class FakeBucket(BaseModel):
} }
s_config['BucketPolicy'] = { s_config['BucketPolicy'] = {
'policyText': self.policy if self.policy else None 'policyText': self.policy.decode('utf-8') if self.policy else None
} }
s_config['IsRequesterPaysEnabled'] = 'false' if self.payer == 'BucketOwner' else 'true' s_config['IsRequesterPaysEnabled'] = 'false' if self.payer == 'BucketOwner' else 'true'

View File

@ -413,7 +413,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if marker: if marker:
result_keys = self._get_results_from_token(result_keys, marker) result_keys = self._get_results_from_token(result_keys, marker)
result_keys, is_truncated, _ = self._truncate_result(result_keys, max_keys) result_keys, is_truncated, next_marker = self._truncate_result(result_keys, max_keys)
template = self.response_template(S3_BUCKET_GET_RESPONSE) template = self.response_template(S3_BUCKET_GET_RESPONSE)
return 200, {}, template.render( return 200, {}, template.render(
@ -423,6 +423,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
result_keys=result_keys, result_keys=result_keys,
result_folders=result_folders, result_folders=result_folders,
is_truncated=is_truncated, is_truncated=is_truncated,
next_marker=next_marker,
max_keys=max_keys max_keys=max_keys
) )
@ -1327,6 +1328,9 @@ S3_BUCKET_GET_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<MaxKeys>{{ max_keys }}</MaxKeys> <MaxKeys>{{ max_keys }}</MaxKeys>
<Delimiter>{{ delimiter }}</Delimiter> <Delimiter>{{ delimiter }}</Delimiter>
<IsTruncated>{{ is_truncated }}</IsTruncated> <IsTruncated>{{ is_truncated }}</IsTruncated>
{% if next_marker %}
<NextMarker>{{ next_marker }}</NextMarker>
{% endif %}
{% for key in result_keys %} {% for key in result_keys %}
<Contents> <Contents>
<Key>{{ key.name }}</Key> <Key>{{ key.name }}</Key>

View File

@ -71,16 +71,16 @@ def print_implementation_coverage(coverage):
def write_implementation_coverage_to_file(coverage): def write_implementation_coverage_to_file(coverage):
implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
# rewrite the implementation coverage file with updated values
# try deleting the implementation coverage file # try deleting the implementation coverage file
try: try:
os.remove("../IMPLEMENTATION_COVERAGE.md") os.remove(implementation_coverage_file)
except OSError: except OSError:
pass pass
implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
# rewrite the implementation coverage file with updated values
print("Writing to {}".format(implementation_coverage_file)) print("Writing to {}".format(implementation_coverage_file))
with open(implementation_coverage_file, "a+") as file: with open(implementation_coverage_file, "w+") as file:
for service_name in sorted(coverage): for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented'] implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented'] not_implemented = coverage.get(service_name)['not_implemented']

View File

@ -94,4 +94,7 @@ setup(
"License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing", "Topic :: Software Development :: Testing",
], ],
project_urls={
"Documentation": "http://docs.getmoto.org/en/latest/",
},
) )

View File

@ -1,8 +1,8 @@
-----BEGIN CERTIFICATE----- -----BEGIN CERTIFICATE-----
MIIEUDCCAjgCCQDfXZHMio+6oDANBgkqhkiG9w0BAQ0FADBjMQswCQYDVQQGEwJH MIIEUDCCAjgCCQDfXZHMio+6oDANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJH
QjESMBAGA1UECAwJQmVya3NoaXJlMQ8wDQYDVQQHDAZTbG91Z2gxEzARBgNVBAoM QjESMBAGA1UECAwJQmVya3NoaXJlMQ8wDQYDVQQHDAZTbG91Z2gxEzARBgNVBAoM
Ck1vdG9TZXJ2ZXIxCzAJBgNVBAsMAlFBMQ0wCwYDVQQDDARNb3RvMB4XDTE3MDky Ck1vdG9TZXJ2ZXIxCzAJBgNVBAsMAlFBMQ0wCwYDVQQDDARNb3RvMB4XDTE5MTAy
MTIxMjQ1MFoXDTI3MDkxOTIxMjQ1MFowcTELMAkGA1UEBhMCR0IxEjAQBgNVBAgM MTEzMjczMVoXDTQ5MTIzMTEzMjczNFowcTELMAkGA1UEBhMCR0IxEjAQBgNVBAgM
CUJlcmtzaGlyZTEPMA0GA1UEBwwGU2xvdWdoMRMwEQYDVQQKDApNb3RvU2VydmVy CUJlcmtzaGlyZTEPMA0GA1UEBwwGU2xvdWdoMRMwEQYDVQQKDApNb3RvU2VydmVy
MRMwEQYDVQQLDApPcGVyYXRpb25zMRMwEQYDVQQDDAoqLm1vdG8uY29tMIIBIjAN MRMwEQYDVQQLDApPcGVyYXRpb25zMRMwEQYDVQQDDAoqLm1vdG8uY29tMIIBIjAN
BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzC/oBkzwiIBEceSC/tSD7hkqs8AW BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzC/oBkzwiIBEceSC/tSD7hkqs8AW
@ -11,16 +11,16 @@ niDXbMgAQE9oxUxtkFESxiNa+EbAMLBFtBkPRvc3iKXh/cfLo7yP8VdqEIDmJCB/
vpjJvf6HnrNJ7keQR+oGJNf7jVaCgOVdJ4lt7+98YDVde7jLx1DN+QbvViJQl60n vpjJvf6HnrNJ7keQR+oGJNf7jVaCgOVdJ4lt7+98YDVde7jLx1DN+QbvViJQl60n
K3bmfuLiiw8154Eyi9DOcJE8AB+W7KpPdrmbPisR1EiqY0i0L62ZixN0rPi5hHF+ K3bmfuLiiw8154Eyi9DOcJE8AB+W7KpPdrmbPisR1EiqY0i0L62ZixN0rPi5hHF+
ozwURL1axcmLjlhIFi8YhBCNcY6ThE7jrqgLIq1n6d8ezRxjDKmqfH1spQIDAQAB ozwURL1axcmLjlhIFi8YhBCNcY6ThE7jrqgLIq1n6d8ezRxjDKmqfH1spQIDAQAB
MA0GCSqGSIb3DQEBDQUAA4ICAQCgl/EfjE0Jh3cqQgoOlaFq6L1iJVgy5sYKCC4r MA0GCSqGSIb3DQEBCwUAA4ICAQAOwvJjY1cLIBVGCDPkkxH4xCP6+QRdm7bqF7X5
OU4dHgifZ6/grqCJesGiS1Vh4L8XklN++C2aSL73lVtxXoCSopP8Yj0rOGeA6b+7 DNZ70YcJ27GldrEPmKX8C1RvkC4oCsaytl8Hlw3ZcS1GvwBxTVlnYIE6nLPPi1ix
Fetm4ZQYF61QtahC0L2fkvKXR+uz1I85ndSoMJPT8lbm7sYJuL81Si32NOo6kC6y LvYYgoq+Mjk/2XPCnU/6cqJhb5INskg9s0o15jv27cUIgWVMnj+d5lvSiy1HhdYM
4eKzV4KznxdAf6XaQMKtMIyXO3PWTrjm5ayzS6UsmnBvULGDCaAQznFlVFdGNSHx wvuQzXELjhe/rHw1/BFGaBV2vd7einUQwla50UZLcsj6FwWSIsv7EB4GaY/G0XqC
CaENICR0CBcB+vbL7FPC683a4afceM+aMcMVElWG5q8fxtgbL/aPhzfonhDGWOM4 Mai2PltBgBPFqsZo27uBeVfxqMZtwAQlr4iWwWZm1haDy6D4GFCSR8E/gtlyhiN4
Rdg8x+yDdi7swxmWlcW5wlP8LpLxN/S3GR9j9IyelxUGmb20yTph3i1K6RM/Fm2W MOk1cmr9PSOMB3CWqKjkx7lPMOQT/f+gxlCnupNHsHcZGvQV4mCPiU+lLwp+8z/s
PI8xdneA6qycUAJo93NfaCuNK7yBfK3uDLqmWlGh3xCG+I1JETLRbxYBWiqeVTb3 bupQwRvu1SwSUD2rIsVeUuSP3hbMcfhiZA50lenQNApimgrThdPUoFXi07FUdL+F
qjHMrsgqTqjcaCiKR/5H2eVkdcr8mLxrV5niyBItDl1xGxj4LF8hDLormhaCjiBb 1QCk6cvA48KzGRo+bPSfZQusj51k/2+hl4sHHZdWg6mGAIY9InMKmPDE4VzM8hro
N1cMq5saj/BpoIanlqOWby6uRMYlZvuhwKQGPVWgfuRWKFzGbMWyPCxATbiU89Wb fr2fJLqKQ4h+xKbEYnvPEPttUdJbvUgr9TKKVw+m3lmW9SktzE5KtvWvN6daTj9Z
IykNkT1zTCE/eZwH12T4A7jrBiWq8WNfIST0Z7MReE6Oz+M9Pxx7DyDzSb2Y1RmU oHDJkOyko3uyTzk+HwWDC/pQ2cC+iF1MjIHi72U9ibObSODg/d9cMH3XJTnZ9W3+
xNYd8CavZLCfns00xZSo+10deMoKVS9GgxSHcS4ELaVaBQwu35emiMJSLcK7iNGE He9iuH4dJpKnVjnJ5NKt7IOrPHID77160hpwF1dim22ZRp508eYapRzgawPMpCcd
I4WVSA== a6YipQ==
-----END CERTIFICATE----- -----END CERTIFICATE-----

View File

@ -0,0 +1,59 @@
from __future__ import unicode_literals
import datetime
from botocore.exceptions import ClientError
import boto3
import sure # noqa
from moto import mock_athena
@mock_athena
def test_create_work_group():
    """create_work_group succeeds once, raises InvalidRequestException on a
    duplicate name, and the group then appears in list_work_groups."""
    client = boto3.client('athena', region_name='us-east-1')
    client.create_work_group(
        Name='athena_workgroup',
        Description='Test work group',
        Configuration={
            'ResultConfiguration': {
                'OutputLocation': 's3://bucket-name/prefix/',
                'EncryptionConfiguration': {
                    'EncryptionOption': 'SSE_KMS',
                    'KmsKey': 'aws:arn:kms:1233456789:us-east-1:key/number-1',
                },
            },
        },
        Tags=[],
    )

    try:
        # The second time should throw an error
        client.create_work_group(
            Name='athena_workgroup',
            Description='duplicate',
            Configuration={
                'ResultConfiguration': {
                    'OutputLocation': 's3://bucket-name/prefix/',
                    'EncryptionConfiguration': {
                        'EncryptionOption': 'SSE_KMS',
                        'KmsKey': 'aws:arn:kms:1233456789:us-east-1:key/number-1',
                    },
                },
            },
        )
    except ClientError as err:
        err.response['Error']['Code'].should.equal('InvalidRequestException')
        err.response['Error']['Message'].should.equal('WorkGroup already exists')
    else:
        # Fixed: the expected error here is InvalidRequestException, not
        # ResourceNotFoundException as the original message claimed.
        raise RuntimeError('Should have raised InvalidRequestException')

    # Then test the work group appears in the work group list
    response = client.list_work_groups()
    response['WorkGroups'].should.have.length_of(1)
    work_group = response['WorkGroups'][0]
    work_group['Name'].should.equal('athena_workgroup')
    work_group['Description'].should.equal('Test work group')
    work_group['State'].should.equal('ENABLED')
View File

@ -415,7 +415,6 @@ def test_get_function():
conn.get_function(FunctionName='junk', Qualifier='$LATEST') conn.get_function(FunctionName='junk', Qualifier='$LATEST')
@mock_lambda @mock_lambda
@mock_s3 @mock_s3
def test_delete_function(): def test_delete_function():
@ -449,6 +448,36 @@ def test_delete_function():
success_result.should.equal({'ResponseMetadata': {'HTTPStatusCode': 204}}) success_result.should.equal({'ResponseMetadata': {'HTTPStatusCode': 204}})
function_list = conn.list_functions()
function_list['Functions'].should.have.length_of(0)
@mock_lambda
@mock_s3
def test_delete_function_by_arn():
    """delete_function accepts the full function ARN, not just the name."""
    bucket = 'test-bucket'
    s3 = boto3.client('s3', 'us-east-1')
    s3.create_bucket(Bucket=bucket)
    s3.put_object(Bucket=bucket, Key='test.zip', Body=get_test_zip_file2())

    client = boto3.client('lambda', 'us-east-1')
    created = client.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={'S3Bucket': bucket, 'S3Key': 'test.zip'},
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    client.delete_function(FunctionName=created['FunctionArn'])
    client.list_functions()['Functions'].should.have.length_of(0)
@mock_lambda
def test_delete_unknown_function():
    """Deleting a function that was never created raises ClientError."""
    conn = boto3.client('lambda', 'us-west-2')
    conn.delete_function.when.called_with(
        FunctionName='testFunctionThatDoesntExist'
    ).should.throw(botocore.client.ClientError)
@ -769,10 +798,10 @@ def test_get_function_created_with_zipfile():
@mock_lambda @mock_lambda
def add_function_permission(): def test_add_function_permission():
conn = boto3.client('lambda', 'us-west-2') conn = boto3.client('lambda', 'us-west-2')
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
result = conn.create_function( conn.create_function(
FunctionName='testFunction', FunctionName='testFunction',
Runtime='python2.7', Runtime='python2.7',
Role='test-iam-role', Role='test-iam-role',
@ -796,16 +825,16 @@ def add_function_permission():
EventSourceToken='blah', EventSourceToken='blah',
Qualifier='2' Qualifier='2'
) )
assert 'Statement' in response assert u'Statement' in response
res = json.loads(response['Statement']) res = json.loads(response[u'Statement'])
assert res['Action'] == "lambda:InvokeFunction" assert res[u'Action'] == u'lambda:InvokeFunction'
@mock_lambda @mock_lambda
def get_function_policy(): def test_get_function_policy():
conn = boto3.client('lambda', 'us-west-2') conn = boto3.client('lambda', 'us-west-2')
zip_content = get_test_zip_file1() zip_content = get_test_zip_file1()
result = conn.create_function( conn.create_function(
FunctionName='testFunction', FunctionName='testFunction',
Runtime='python2.7', Runtime='python2.7',
Role='test-iam-role', Role='test-iam-role',
@ -834,10 +863,9 @@ def get_function_policy():
FunctionName='testFunction' FunctionName='testFunction'
) )
assert 'Policy' in response assert u'Policy' in response
assert isinstance(response['Policy'], str) res = json.loads(response[u'Policy'])
res = json.loads(response['Policy']) assert res[u'Statement'][0][u'Action'] == u'lambda:InvokeFunction'
assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction'
@mock_lambda @mock_lambda

View File

@ -2161,20 +2161,11 @@ def test_condition_expression__attr_doesnt_exist():
client.create_table( client.create_table(
TableName='test', TableName='test',
KeySchema=[{'AttributeName': 'forum_name', 'KeyType': 'HASH'}], KeySchema=[{'AttributeName': 'forum_name', 'KeyType': 'HASH'}],
AttributeDefinitions=[ AttributeDefinitions=[{'AttributeName': 'forum_name', 'AttributeType': 'S'}],
{'AttributeName': 'forum_name', 'AttributeType': 'S'}, ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1})
],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
)
client.put_item(
TableName='test',
Item={
'forum_name': {'S': 'foo'},
'ttl': {'N': 'bar'},
}
)
client.put_item(TableName='test',
Item={'forum_name': {'S': 'foo'}, 'ttl': {'N': 'bar'}})
def update_if_attr_doesnt_exist(): def update_if_attr_doesnt_exist():
# Test nonexistent top-level attribute. # Test nonexistent top-level attribute.
@ -2261,6 +2252,7 @@ def test_condition_expression__and_order():
} }
) )
@mock_dynamodb2 @mock_dynamodb2
def test_query_gsi_with_range_key(): def test_query_gsi_with_range_key():
dynamodb = boto3.client('dynamodb', region_name='us-east-1') dynamodb = boto3.client('dynamodb', region_name='us-east-1')
@ -2510,13 +2502,15 @@ def test_index_with_unknown_attributes_should_fail():
def test_update_list_index__set_existing_index(): def test_update_list_index__set_existing_index():
table_name = 'test_list_index_access' table_name = 'test_list_index_access'
client = create_table_with_list(table_name) client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}},
UpdateExpression='set itemlist[1]=:Item', UpdateExpression='set itemlist[1]=:Item',
ExpressionAttributeValues={':Item': {'S': 'bar2_update'}}) ExpressionAttributeValues={':Item': {'S': 'bar2_update'}})
# #
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item'] result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
assert result['id'] == {'S': 'foo'} result['id'].should.equal({'S': 'foo'})
assert result['itemlist'] == {'L': [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}]} result['itemlist'].should.equal({'L': [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}]})
@mock_dynamodb2 @mock_dynamodb2
@ -2530,14 +2524,16 @@ def test_update_list_index__set_existing_nested_index():
ExpressionAttributeValues={':Item': {'S': 'bar2_update'}}) ExpressionAttributeValues={':Item': {'S': 'bar2_update'}})
# #
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item'] result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
assert result['id'] == {'S': 'foo2'} result['id'].should.equal({'S': 'foo2'})
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}] result['itemmap']['M']['itemlist']['L'].should.equal([{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}])
@mock_dynamodb2 @mock_dynamodb2
def test_update_list_index__set_index_out_of_range(): def test_update_list_index__set_index_out_of_range():
table_name = 'test_list_index_access' table_name = 'test_list_index_access'
client = create_table_with_list(table_name) client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}},
UpdateExpression='set itemlist[10]=:Item', UpdateExpression='set itemlist[10]=:Item',
ExpressionAttributeValues={':Item': {'S': 'bar10'}}) ExpressionAttributeValues={':Item': {'S': 'bar10'}})
@ -2562,6 +2558,25 @@ def test_update_list_index__set_nested_index_out_of_range():
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}, {'S': 'bar10'}] assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}, {'S': 'bar10'}]
@mock_dynamodb2
def test_update_list_index__set_double_nested_index():
    """Updating an attribute of a map nested inside a list element touches
    only that element."""
    table_name = 'test_list_index_access'
    client = create_table_with_list(table_name)
    client.put_item(TableName=table_name,
                    Item={'id': {'S': 'foo2'},
                          'itemmap': {'M': {'itemlist': {'L': [{'M': {'foo': {'S': 'bar11'}, 'foos': {'S': 'bar12'}}},
                                                               {'M': {'foo': {'S': 'bar21'}, 'foos': {'S': 'bar21'}}}]}}}})
    client.update_item(TableName=table_name, Key={'id': {'S': 'foo2'}},
                       UpdateExpression='set itemmap.itemlist[1].foos=:Item',
                       ExpressionAttributeValues={':Item': {'S': 'bar22'}})
    #
    result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
    # Consistency fix: use sure-style assertions throughout, matching the
    # surrounding tests (the original mixed a bare `assert` in).
    result['id'].should.equal({'S': 'foo2'})
    len(result['itemmap']['M']['itemlist']['L']).should.equal(2)
    result['itemmap']['M']['itemlist']['L'][0].should.equal({'M': {'foo': {'S': 'bar11'}, 'foos': {'S': 'bar12'}}})  # unchanged
    result['itemmap']['M']['itemlist']['L'][1].should.equal({'M': {'foo': {'S': 'bar21'}, 'foos': {'S': 'bar22'}}})  # updated
@mock_dynamodb2 @mock_dynamodb2
def test_update_list_index__set_index_of_a_string(): def test_update_list_index__set_index_of_a_string():
table_name = 'test_list_index_access' table_name = 'test_list_index_access'
@ -2578,15 +2593,29 @@ def test_update_list_index__set_index_of_a_string():
'The document path provided in the update expression is invalid for update') 'The document path provided in the update expression is invalid for update')
@mock_dynamodb2
def test_remove_top_level_attribute():
    """REMOVE on a top-level attribute deletes it from the item."""
    table_name = 'test_remove'
    client = create_table_with_list(table_name)
    client.put_item(
        TableName=table_name,
        Item={'id': {'S': 'foo'}, 'item': {'S': 'bar'}},
    )
    client.update_item(
        TableName=table_name,
        Key={'id': {'S': 'foo'}},
        UpdateExpression='REMOVE item',
    )
    fetched = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
    fetched.should.equal({'id': {'S': 'foo'}})
@mock_dynamodb2 @mock_dynamodb2
def test_remove_list_index__remove_existing_index(): def test_remove_list_index__remove_existing_index():
table_name = 'test_list_index_access' table_name = 'test_list_index_access'
client = create_table_with_list(table_name) client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[1]') client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[1]')
# #
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item'] result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
assert result['id'] == {'S': 'foo'} result['id'].should.equal({'S': 'foo'})
assert result['itemlist'] == {'L': [{'S': 'bar1'}, {'S': 'bar3'}]} result['itemlist'].should.equal({'L': [{'S': 'bar1'}, {'S': 'bar3'}]})
@mock_dynamodb2 @mock_dynamodb2
@ -2598,8 +2627,8 @@ def test_remove_list_index__remove_existing_nested_index():
client.update_item(TableName=table_name, Key={'id': {'S': 'foo2'}}, UpdateExpression='REMOVE itemmap.itemlist[1]') client.update_item(TableName=table_name, Key={'id': {'S': 'foo2'}}, UpdateExpression='REMOVE itemmap.itemlist[1]')
# #
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item'] result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
assert result['id'] == {'S': 'foo2'} result['id'].should.equal({'S': 'foo2'})
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}] result['itemmap']['M']['itemlist']['L'].should.equal([{'S': 'bar1'}])
@mock_dynamodb2 @mock_dynamodb2
@ -2626,6 +2655,8 @@ def test_remove_list_index__remove_existing_double_nested_index():
def test_remove_list_index__remove_index_out_of_range(): def test_remove_list_index__remove_index_out_of_range():
table_name = 'test_list_index_access' table_name = 'test_list_index_access'
client = create_table_with_list(table_name) client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[10]') client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[10]')
# #
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item'] result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
@ -2639,8 +2670,6 @@ def create_table_with_list(table_name):
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}], KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}], AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
BillingMode='PAY_PER_REQUEST') BillingMode='PAY_PER_REQUEST')
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
return client return client

View File

@ -214,16 +214,46 @@ def test_update_login_profile():
def test_delete_role(): def test_delete_role():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
with assert_raises(ClientError): with assert_raises(conn.exceptions.NoSuchEntityException):
conn.delete_role(RoleName="my-role") conn.delete_role(RoleName="my-role")
# Test deletion failure with a managed policy
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/") conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
role = conn.get_role(RoleName="my-role") response = conn.create_policy(PolicyName="my-managed-policy", PolicyDocument=MOCK_POLICY)
role.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-path/my-role') conn.attach_role_policy(PolicyArn=response['Policy']['Arn'], RoleName="my-role")
with assert_raises(conn.exceptions.DeleteConflictException):
conn.delete_role(RoleName="my-role") conn.delete_role(RoleName="my-role")
conn.detach_role_policy(PolicyArn=response['Policy']['Arn'], RoleName="my-role")
conn.delete_policy(PolicyArn=response['Policy']['Arn'])
conn.delete_role(RoleName="my-role")
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_role(RoleName="my-role")
with assert_raises(ClientError): # Test deletion failure with an inline policy
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
conn.put_role_policy(RoleName="my-role", PolicyName="my-role-policy", PolicyDocument=MOCK_POLICY)
with assert_raises(conn.exceptions.DeleteConflictException):
conn.delete_role(RoleName="my-role")
conn.delete_role_policy(RoleName="my-role", PolicyName="my-role-policy")
conn.delete_role(RoleName="my-role")
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_role(RoleName="my-role")
# Test deletion failure with attachment to an instance profile
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
conn.create_instance_profile(InstanceProfileName="my-profile")
conn.add_role_to_instance_profile(InstanceProfileName="my-profile", RoleName="my-role")
with assert_raises(conn.exceptions.DeleteConflictException):
conn.delete_role(RoleName="my-role")
conn.remove_role_from_instance_profile(InstanceProfileName="my-profile", RoleName="my-role")
conn.delete_role(RoleName="my-role")
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_role(RoleName="my-role")
# Test deletion with no conflicts
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
conn.delete_role(RoleName="my-role")
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_role(RoleName="my-role") conn.get_role(RoleName="my-role")
@ -723,6 +753,263 @@ def test_mfa_devices():
len(response['MFADevices']).should.equal(0) len(response['MFADevices']).should.equal(0)
@mock_iam
def test_create_virtual_mfa_device():
    """Devices created with the default, explicit root, and a custom path
    all get the expected serial number, a base32 seed and a QR code."""
    client = boto3.client('iam', region_name='us-east-1')

    device = client.create_virtual_mfa_device(
        VirtualMFADeviceName='test-device'
    )['VirtualMFADevice']
    device['SerialNumber'].should.equal('arn:aws:iam::123456789012:mfa/test-device')
    device['Base32StringSeed'].decode('ascii').should.match('[A-Z234567]')
    device['QRCodePNG'].should_not.be.empty

    device = client.create_virtual_mfa_device(
        Path='/',
        VirtualMFADeviceName='test-device-2'
    )['VirtualMFADevice']
    device['SerialNumber'].should.equal('arn:aws:iam::123456789012:mfa/test-device-2')
    device['Base32StringSeed'].decode('ascii').should.match('[A-Z234567]')
    device['QRCodePNG'].should_not.be.empty

    device = client.create_virtual_mfa_device(
        Path='/test/',
        VirtualMFADeviceName='test-device'
    )['VirtualMFADevice']
    device['SerialNumber'].should.equal('arn:aws:iam::123456789012:mfa/test/test-device')
    device['Base32StringSeed'].decode('ascii').should.match('[A-Z234567]')
    device['QRCodePNG'].should_not.be.empty
@mock_iam
def test_create_virtual_mfa_device_errors():
    """Duplicate names and malformed or over-long paths are rejected."""
    client = boto3.client('iam', region_name='us-east-1')
    client.create_virtual_mfa_device(
        VirtualMFADeviceName='test-device'
    )

    path_error = (
        'The specified value for path is invalid. '
        'It must begin and end with / and contain only alphanumeric characters and/or / characters.'
    )
    invalid_calls = [
        (dict(VirtualMFADeviceName='test-device'),
         'MFADevice entity at the same path and name already exists.'),
        (dict(Path='test', VirtualMFADeviceName='test-device'),
         path_error),
        (dict(Path='/test//test/', VirtualMFADeviceName='test-device'),
         path_error),
        (dict(Path='/{}/'.format('b' * 511), VirtualMFADeviceName='test-device'),
         '1 validation error detected: '
         'Value "{}" at "path" failed to satisfy constraint: '
         'Member must have length less than or equal to 512'),
    ]
    for kwargs, expected_message in invalid_calls:
        client.create_virtual_mfa_device.when.called_with(**kwargs).should.throw(
            ClientError,
            expected_message
        )
@mock_iam
def test_delete_virtual_mfa_device():
    """A deleted virtual MFA device no longer appears in listings."""
    client = boto3.client('iam', region_name='us-east-1')
    created = client.create_virtual_mfa_device(VirtualMFADeviceName='test-device')
    serial = created['VirtualMFADevice']['SerialNumber']

    client.delete_virtual_mfa_device(SerialNumber=serial)

    listing = client.list_virtual_mfa_devices()
    listing['VirtualMFADevices'].should.have.length_of(0)
    listing['IsTruncated'].should_not.be.ok
@mock_iam
def test_delete_virtual_mfa_device_errors():
    """Deleting a virtual MFA device that was never created raises a ClientError."""
    client = boto3.client('iam', region_name='us-east-1')
    missing_serial = 'arn:aws:iam::123456789012:mfa/not-existing'

    client.delete_virtual_mfa_device.when.called_with(
        SerialNumber=missing_serial
    ).should.throw(
        ClientError,
        "VirtualMFADevice with serial number {0} doesn't exist.".format(missing_serial)
    )
@mock_iam
def test_list_virtual_mfa_devices():
    """ListVirtualMFADevices: full listing, assignment-status filter, and pagination."""
    client = boto3.client('iam', region_name='us-east-1')
    first_serial = client.create_virtual_mfa_device(
        VirtualMFADeviceName='test-device'
    )['VirtualMFADevice']['SerialNumber']
    second_serial = client.create_virtual_mfa_device(
        Path='/test/',
        VirtualMFADeviceName='test-device'
    )['VirtualMFADevice']['SerialNumber']

    both_devices = [
        {'SerialNumber': first_serial},
        {'SerialNumber': second_serial},
    ]

    # Unfiltered listing returns both devices in creation order.
    response = client.list_virtual_mfa_devices()
    response['VirtualMFADevices'].should.equal(both_devices)
    response['IsTruncated'].should_not.be.ok

    # Nothing has been assigned to a user yet.
    response = client.list_virtual_mfa_devices(AssignmentStatus='Assigned')
    response['VirtualMFADevices'].should.have.length_of(0)
    response['IsTruncated'].should_not.be.ok

    response = client.list_virtual_mfa_devices(AssignmentStatus='Unassigned')
    response['VirtualMFADevices'].should.equal(both_devices)
    response['IsTruncated'].should_not.be.ok

    # Page through one device at a time; first page is truncated with a marker.
    response = client.list_virtual_mfa_devices(AssignmentStatus='Any', MaxItems=1)
    response['VirtualMFADevices'].should.equal([{'SerialNumber': first_serial}])
    response['IsTruncated'].should.be.ok
    response['Marker'].should.equal('1')

    # Second page picks up where the marker left off and is final.
    response = client.list_virtual_mfa_devices(
        AssignmentStatus='Any',
        Marker=response['Marker']
    )
    response['VirtualMFADevices'].should.equal([{'SerialNumber': second_serial}])
    response['IsTruncated'].should_not.be.ok
@mock_iam
def test_list_virtual_mfa_devices_errors():
    """An unknown pagination marker is rejected by ListVirtualMFADevices."""
    client = boto3.client('iam', region_name='us-east-1')
    client.create_virtual_mfa_device(VirtualMFADeviceName='test-device')

    client.list_virtual_mfa_devices.when.called_with(Marker='100').should.throw(
        ClientError,
        'Invalid Marker.'
    )
@mock_iam
def test_enable_virtual_mfa_device():
    """Enabling/deactivating a device moves it between Assigned and Unassigned listings."""
    client = boto3.client('iam', region_name='us-east-1')
    serial = client.create_virtual_mfa_device(
        VirtualMFADeviceName='test-device'
    )['VirtualMFADevice']['SerialNumber']

    client.create_user(UserName='test-user')
    client.enable_mfa_device(
        UserName='test-user',
        SerialNumber=serial,
        AuthenticationCode1='234567',
        AuthenticationCode2='987654'
    )

    # Once enabled, the device disappears from the unassigned listing ...
    response = client.list_virtual_mfa_devices(AssignmentStatus='Unassigned')
    response['VirtualMFADevices'].should.have.length_of(0)
    response['IsTruncated'].should_not.be.ok

    # ... and appears as assigned, carrying the owning user's details.
    response = client.list_virtual_mfa_devices(AssignmentStatus='Assigned')
    device = response['VirtualMFADevices'][0]
    device['SerialNumber'].should.equal(serial)
    device['User']['Path'].should.equal('/')
    device['User']['UserName'].should.equal('test-user')
    device['User']['UserId'].should_not.be.empty
    device['User']['Arn'].should.equal('arn:aws:iam::123456789012:user/test-user')
    device['User']['CreateDate'].should.be.a(datetime)
    device['EnableDate'].should.be.a(datetime)
    response['IsTruncated'].should_not.be.ok

    client.deactivate_mfa_device(UserName='test-user', SerialNumber=serial)

    # After deactivation the device is unassigned again.
    response = client.list_virtual_mfa_devices(AssignmentStatus='Assigned')
    response['VirtualMFADevices'].should.have.length_of(0)
    response['IsTruncated'].should_not.be.ok

    response = client.list_virtual_mfa_devices(AssignmentStatus='Unassigned')
    response['VirtualMFADevices'].should.equal([{'SerialNumber': serial}])
    response['IsTruncated'].should_not.be.ok
@mock_iam_deprecated() @mock_iam_deprecated()
def test_delete_user_deprecated(): def test_delete_user_deprecated():
conn = boto.connect_iam() conn = boto.connect_iam()
@ -735,12 +1022,40 @@ def test_delete_user_deprecated():
@mock_iam() @mock_iam()
def test_delete_user(): def test_delete_user():
conn = boto3.client('iam', region_name='us-east-1') conn = boto3.client('iam', region_name='us-east-1')
with assert_raises(ClientError): with assert_raises(conn.exceptions.NoSuchEntityException):
conn.delete_user(UserName='my-user') conn.delete_user(UserName='my-user')
# Test deletion failure with a managed policy
conn.create_user(UserName='my-user') conn.create_user(UserName='my-user')
[user['UserName'] for user in conn.list_users()['Users']].should.equal(['my-user']) response = conn.create_policy(PolicyName="my-managed-policy", PolicyDocument=MOCK_POLICY)
conn.attach_user_policy(PolicyArn=response['Policy']['Arn'], UserName="my-user")
with assert_raises(conn.exceptions.DeleteConflictException):
conn.delete_user(UserName='my-user') conn.delete_user(UserName='my-user')
assert conn.list_users()['Users'].should.be.empty conn.detach_user_policy(PolicyArn=response['Policy']['Arn'], UserName="my-user")
conn.delete_policy(PolicyArn=response['Policy']['Arn'])
conn.delete_user(UserName='my-user')
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_user(UserName='my-user')
# Test deletion failure with an inline policy
conn.create_user(UserName='my-user')
conn.put_user_policy(
UserName='my-user',
PolicyName='my-user-policy',
PolicyDocument=MOCK_POLICY
)
with assert_raises(conn.exceptions.DeleteConflictException):
conn.delete_user(UserName='my-user')
conn.delete_user_policy(UserName='my-user', PolicyName='my-user-policy')
conn.delete_user(UserName='my-user')
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_user(UserName='my-user')
# Test deletion with no conflicts
conn.create_user(UserName='my-user')
conn.delete_user(UserName='my-user')
with assert_raises(conn.exceptions.NoSuchEntityException):
conn.get_user(UserName='my-user')
@mock_iam_deprecated() @mock_iam_deprecated()

View File

@ -3,6 +3,8 @@ from __future__ import unicode_literals
import datetime import datetime
import os import os
import sys
from six.moves.urllib.request import urlopen from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError from six.moves.urllib.error import HTTPError
from functools import wraps from functools import wraps
@ -1218,11 +1220,6 @@ def test_key_with_trailing_slash_in_ordinary_calling_format():
[k.name for k in bucket.get_all_keys()].should.contain(key_name) [k.name for k in bucket.get_all_keys()].should.contain(key_name)
"""
boto3
"""
@mock_s3 @mock_s3
def test_boto3_key_etag(): def test_boto3_key_etag():
s3 = boto3.client('s3', region_name='us-east-1') s3 = boto3.client('s3', region_name='us-east-1')
@ -1247,6 +1244,54 @@ def test_website_redirect_location():
resp['WebsiteRedirectLocation'].should.equal(url) resp['WebsiteRedirectLocation'].should.equal(url)
@mock_s3
def test_boto3_list_objects_truncated_response():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='one', Body=b'1')
s3.put_object(Bucket='mybucket', Key='two', Body=b'22')
s3.put_object(Bucket='mybucket', Key='three', Body=b'333')
# First list
resp = s3.list_objects(Bucket='mybucket', MaxKeys=1)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'one'
assert resp['MaxKeys'] == 1
assert resp['IsTruncated'] == True
assert resp['Prefix'] == 'None'
assert resp['Delimiter'] == 'None'
assert 'NextMarker' in resp
next_marker = resp["NextMarker"]
# Second list
resp = s3.list_objects(
Bucket='mybucket', MaxKeys=1, Marker=next_marker)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'three'
assert resp['MaxKeys'] == 1
assert resp['IsTruncated'] == True
assert resp['Prefix'] == 'None'
assert resp['Delimiter'] == 'None'
assert 'NextMarker' in resp
next_marker = resp["NextMarker"]
# Third list
resp = s3.list_objects(
Bucket='mybucket', MaxKeys=1, Marker=next_marker)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'two'
assert resp['MaxKeys'] == 1
assert resp['IsTruncated'] == False
assert resp['Prefix'] == 'None'
assert resp['Delimiter'] == 'None'
assert 'NextMarker' not in resp
@mock_s3 @mock_s3
def test_boto3_list_keys_xml_escaped(): def test_boto3_list_keys_xml_escaped():
s3 = boto3.client('s3', region_name='us-east-1') s3 = boto3.client('s3', region_name='us-east-1')
@ -3647,6 +3692,24 @@ def test_s3_config_dict():
s3_config_query.backends['global'].set_bucket_acl('logbucket', log_acls) s3_config_query.backends['global'].set_bucket_acl('logbucket', log_acls)
s3_config_query.backends['global'].put_bucket_logging('bucket1', {'TargetBucket': 'logbucket', 'TargetPrefix': ''}) s3_config_query.backends['global'].put_bucket_logging('bucket1', {'TargetBucket': 'logbucket', 'TargetPrefix': ''})
policy = json.dumps({
'Statement': [
{
"Effect": "Deny",
"Action": "s3:DeleteObject",
"Principal": "*",
"Resource": "arn:aws:s3:::bucket1/*"
}
]
})
# The policy is a byte array -- need to encode in Python 3 -- for Python 2 just pass the raw string in:
if sys.version_info[0] > 2:
pass_policy = bytes(policy, 'utf-8')
else:
pass_policy = policy
s3_config_query.backends['global'].set_bucket_policy('bucket1', pass_policy)
# Get the us-west-2 bucket and verify that it works properly: # Get the us-west-2 bucket and verify that it works properly:
bucket1_result = s3_config_query.get_config_resource('bucket1') bucket1_result = s3_config_query.get_config_resource('bucket1')
@ -3666,7 +3729,7 @@ def test_s3_config_dict():
{'destinationBucketName': 'logbucket', 'logFilePrefix': ''} {'destinationBucketName': 'logbucket', 'logFilePrefix': ''}
# Verify the policy: # Verify the policy:
assert json.loads(bucket1_result['supplementaryConfiguration']['BucketPolicy']) == {'policyText': None} assert json.loads(bucket1_result['supplementaryConfiguration']['BucketPolicy']) == {'policyText': policy}
# Filter by correct region: # Filter by correct region:
assert bucket1_result == s3_config_query.get_config_resource('bucket1', resource_region='us-west-2') assert bucket1_result == s3_config_query.get_config_resource('bucket1', resource_region='us-west-2')
@ -3679,3 +3742,7 @@ def test_s3_config_dict():
# With an incorrect resource name: # With an incorrect resource name:
assert not s3_config_query.get_config_resource('bucket1', resource_name='eu-bucket-1') assert not s3_config_query.get_config_resource('bucket1', resource_name='eu-bucket-1')
# Verify that no bucket policy returns the proper value:
assert json.loads(s3_config_query.get_config_resource('logbucket')['supplementaryConfiguration']['BucketPolicy']) == \
{'policyText': None}