Merge branch 'master' into add-iam-virtual-mfa-device

This commit is contained in:
Jack Danger 2019-10-22 14:54:10 -07:00 committed by GitHub
commit 00045ae480
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 746 additions and 231 deletions

View File

@ -146,12 +146,15 @@
- [ ] delete_domain_association
- [ ] delete_job
- [ ] delete_webhook
- [ ] generate_access_logs
- [ ] get_app
- [ ] get_artifact_url
- [ ] get_branch
- [ ] get_domain_association
- [ ] get_job
- [ ] get_webhook
- [ ] list_apps
- [ ] list_artifacts
- [ ] list_branches
- [ ] list_domain_associations
- [ ] list_jobs
@ -292,6 +295,8 @@
## apigatewaymanagementapi
0% implemented
- [ ] delete_connection
- [ ] get_connection
- [ ] post_to_connection
## apigatewayv2
@ -385,6 +390,7 @@
- [ ] list_applications
- [ ] list_components
- [ ] list_problems
- [ ] update_application
- [ ] update_component
- [ ] update_component_configuration
@ -509,11 +515,11 @@
- [ ] update_type
## athena
0% implemented
10% implemented
- [ ] batch_get_named_query
- [ ] batch_get_query_execution
- [ ] create_named_query
- [ ] create_work_group
- [X] create_work_group
- [ ] delete_named_query
- [ ] delete_work_group
- [ ] get_named_query
@ -523,7 +529,7 @@
- [ ] list_named_queries
- [ ] list_query_executions
- [ ] list_tags_for_resource
- [ ] list_work_groups
- [X] list_work_groups
- [ ] start_query_execution
- [ ] stop_query_execution
- [ ] tag_resource
@ -1069,6 +1075,7 @@
## codecommit
0% implemented
- [ ] batch_describe_merge_conflicts
- [ ] batch_get_commits
- [ ] batch_get_repositories
- [ ] create_branch
- [ ] create_commit
@ -1232,7 +1239,7 @@
- [ ] update_user_profile
## cognito-identity
23% implemented
28% implemented
- [X] create_identity_pool
- [ ] delete_identities
- [ ] delete_identity_pool
@ -1428,13 +1435,22 @@
## comprehendmedical
0% implemented
- [ ] describe_entities_detection_v2_job
- [ ] describe_phi_detection_job
- [ ] detect_entities
- [ ] detect_entities_v2
- [ ] detect_phi
- [ ] list_entities_detection_v2_jobs
- [ ] list_phi_detection_jobs
- [ ] start_entities_detection_v2_job
- [ ] start_phi_detection_job
- [ ] stop_entities_detection_v2_job
- [ ] stop_phi_detection_job
## config
24% implemented
- [ ] batch_get_aggregate_resource_config
- [ ] batch_get_resource_config
31% implemented
- [X] batch_get_aggregate_resource_config
- [X] batch_get_resource_config
- [X] delete_aggregation_authorization
- [ ] delete_config_rule
- [X] delete_configuration_aggregator
@ -1444,6 +1460,7 @@
- [ ] delete_organization_config_rule
- [ ] delete_pending_aggregation_request
- [ ] delete_remediation_configuration
- [ ] delete_remediation_exceptions
- [ ] delete_retention_configuration
- [ ] deliver_config_snapshot
- [ ] describe_aggregate_compliance_by_config_rules
@ -1462,6 +1479,7 @@
- [ ] describe_organization_config_rules
- [ ] describe_pending_aggregation_requests
- [ ] describe_remediation_configurations
- [ ] describe_remediation_exceptions
- [ ] describe_remediation_execution_status
- [ ] describe_retention_configurations
- [ ] get_aggregate_compliance_details_by_config_rule
@ -1474,9 +1492,9 @@
- [ ] get_compliance_summary_by_resource_type
- [ ] get_discovered_resource_counts
- [ ] get_organization_config_rule_detailed_status
- [ ] get_resource_config_history
- [ ] list_aggregate_discovered_resources
- [ ] list_discovered_resources
- [X] get_resource_config_history
- [X] list_aggregate_discovered_resources
- [X] list_discovered_resources
- [ ] list_tags_for_resource
- [X] put_aggregation_authorization
- [ ] put_config_rule
@ -1486,6 +1504,7 @@
- [ ] put_evaluations
- [ ] put_organization_config_rule
- [ ] put_remediation_configurations
- [ ] put_remediation_exceptions
- [ ] put_retention_configuration
- [ ] select_resource_config
- [ ] start_config_rules_evaluation
@ -1523,6 +1542,7 @@
0% implemented
- [ ] delete_report_definition
- [ ] describe_report_definitions
- [ ] modify_report_definition
- [ ] put_report_definition
## datapipeline
@ -1554,6 +1574,7 @@
- [ ] create_location_efs
- [ ] create_location_nfs
- [ ] create_location_s3
- [ ] create_location_smb
- [ ] create_task
- [ ] delete_agent
- [ ] delete_location
@ -1562,6 +1583,7 @@
- [ ] describe_location_efs
- [ ] describe_location_nfs
- [ ] describe_location_s3
- [ ] describe_location_smb
- [ ] describe_task
- [ ] describe_task_execution
- [ ] list_agents
@ -1771,6 +1793,7 @@
- [ ] create_replication_subnet_group
- [ ] create_replication_task
- [ ] delete_certificate
- [ ] delete_connection
- [ ] delete_endpoint
- [ ] delete_event_subscription
- [ ] delete_replication_instance
@ -1826,6 +1849,7 @@
- [ ] delete_db_cluster_snapshot
- [ ] delete_db_instance
- [ ] delete_db_subnet_group
- [ ] describe_certificates
- [ ] describe_db_cluster_parameter_groups
- [ ] describe_db_cluster_parameters
- [ ] describe_db_cluster_snapshot_attributes
@ -2061,6 +2085,7 @@
- [X] delete_network_interface
- [ ] delete_network_interface_permission
- [ ] delete_placement_group
- [ ] delete_queued_reserved_instances
- [X] delete_route
- [X] delete_route_table
- [X] delete_security_group
@ -2105,6 +2130,7 @@
- [X] describe_dhcp_options
- [ ] describe_egress_only_internet_gateways
- [ ] describe_elastic_gpus
- [ ] describe_export_image_tasks
- [ ] describe_export_tasks
- [ ] describe_fleet_history
- [ ] describe_fleet_instances
@ -2210,6 +2236,7 @@
- [ ] enable_vpc_classic_link_dns_support
- [ ] export_client_vpn_client_certificate_revocation_list
- [ ] export_client_vpn_client_configuration
- [ ] export_image
- [ ] export_transit_gateway_routes
- [ ] get_capacity_reservation_usage
- [ ] get_console_output
@ -2263,6 +2290,8 @@
- [ ] modify_vpc_peering_connection_options
- [ ] modify_vpc_tenancy
- [ ] modify_vpn_connection
- [ ] modify_vpn_tunnel_certificate
- [ ] modify_vpn_tunnel_options
- [ ] monitor_instances
- [ ] move_address_to_vpc
- [ ] provision_byoip_cidr
@ -2298,6 +2327,7 @@
- [ ] run_instances
- [ ] run_scheduled_instances
- [ ] search_transit_gateway_routes
- [ ] send_diagnostic_interrupt
- [X] start_instances
- [X] stop_instances
- [ ] terminate_client_vpn_connections
@ -2343,7 +2373,7 @@
- [ ] upload_layer_part
## ecs
49% implemented
66% implemented
- [X] create_cluster
- [X] create_service
- [ ] create_task_set
@ -2381,8 +2411,9 @@
- [ ] submit_attachment_state_changes
- [ ] submit_container_state_change
- [ ] submit_task_state_change
- [x] tag_resource
- [x] untag_resource
- [X] tag_resource
- [X] untag_resource
- [ ] update_cluster_settings
- [ ] update_container_agent
- [X] update_container_instances_state
- [X] update_service
@ -2413,7 +2444,10 @@
- [ ] describe_cluster
- [ ] describe_update
- [ ] list_clusters
- [ ] list_tags_for_resource
- [ ] list_updates
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_cluster_config
- [ ] update_cluster_version
@ -2603,7 +2637,7 @@
- [X] set_subnets
## emr
55% implemented
51% implemented
- [ ] add_instance_fleet
- [X] add_instance_groups
- [X] add_job_flow_steps
@ -2615,6 +2649,7 @@
- [X] describe_job_flows
- [ ] describe_security_configuration
- [X] describe_step
- [ ] get_block_public_access_configuration
- [X] list_bootstrap_actions
- [X] list_clusters
- [ ] list_instance_fleets
@ -2625,6 +2660,7 @@
- [ ] modify_instance_fleet
- [X] modify_instance_groups
- [ ] put_auto_scaling_policy
- [ ] put_block_public_access_configuration
- [ ] remove_auto_scaling_policy
- [X] remove_tags
- [X] run_job_flow
@ -2724,6 +2760,39 @@
- [ ] put_notification_channel
- [ ] put_policy
## forecast
0% implemented
- [ ] create_dataset
- [ ] create_dataset_group
- [ ] create_dataset_import_job
- [ ] create_forecast
- [ ] create_forecast_export_job
- [ ] create_predictor
- [ ] delete_dataset
- [ ] delete_dataset_group
- [ ] delete_dataset_import_job
- [ ] delete_forecast
- [ ] delete_forecast_export_job
- [ ] delete_predictor
- [ ] describe_dataset
- [ ] describe_dataset_group
- [ ] describe_dataset_import_job
- [ ] describe_forecast
- [ ] describe_forecast_export_job
- [ ] describe_predictor
- [ ] get_accuracy_metrics
- [ ] list_dataset_groups
- [ ] list_dataset_import_jobs
- [ ] list_datasets
- [ ] list_forecast_export_jobs
- [ ] list_forecasts
- [ ] list_predictors
- [ ] update_dataset_group
## forecastquery
0% implemented
- [ ] query_forecast
## fsx
0% implemented
- [ ] create_backup
@ -2871,7 +2940,7 @@
- [ ] update_listener
## glue
5% implemented
4% implemented
- [ ] batch_create_partition
- [ ] batch_delete_connection
- [ ] batch_delete_partition
@ -2884,12 +2953,14 @@
- [ ] batch_get_triggers
- [ ] batch_get_workflows
- [ ] batch_stop_job_run
- [ ] cancel_ml_task_run
- [ ] create_classifier
- [ ] create_connection
- [ ] create_crawler
- [X] create_database
- [ ] create_dev_endpoint
- [ ] create_job
- [ ] create_ml_transform
- [ ] create_partition
- [ ] create_script
- [ ] create_security_configuration
@ -2903,6 +2974,7 @@
- [ ] delete_database
- [ ] delete_dev_endpoint
- [ ] delete_job
- [ ] delete_ml_transform
- [ ] delete_partition
- [ ] delete_resource_policy
- [ ] delete_security_configuration
@ -2927,11 +2999,14 @@
- [ ] get_dev_endpoints
- [ ] get_job
- [ ] get_job_bookmark
- [ ] get_job_bookmarks
- [ ] get_job_run
- [ ] get_job_runs
- [ ] get_jobs
- [ ] get_mapping
- [ ] get_ml_task_run
- [ ] get_ml_task_runs
- [ ] get_ml_transform
- [ ] get_ml_transforms
- [ ] get_partition
- [ ] get_partitions
- [ ] get_plan
@ -2961,9 +3036,14 @@
- [ ] put_resource_policy
- [ ] put_workflow_run_properties
- [ ] reset_job_bookmark
- [ ] search_tables
- [ ] start_crawler
- [ ] start_crawler_schedule
- [ ] start_export_labels_task_run
- [ ] start_import_labels_task_run
- [ ] start_job_run
- [ ] start_ml_evaluation_task_run
- [ ] start_ml_labeling_set_generation_task_run
- [ ] start_trigger
- [ ] start_workflow_run
- [ ] stop_crawler
@ -2978,6 +3058,7 @@
- [ ] update_database
- [ ] update_dev_endpoint
- [ ] update_job
- [ ] update_ml_transform
- [ ] update_partition
- [ ] update_table
- [ ] update_trigger
@ -3355,7 +3436,7 @@
- [ ] update_assessment_target
## iot
24% implemented
23% implemented
- [ ] accept_certificate_transfer
- [ ] add_thing_to_billing_group
- [X] add_thing_to_thing_group
@ -3364,6 +3445,7 @@
- [X] attach_principal_policy
- [ ] attach_security_profile
- [X] attach_thing_principal
- [ ] cancel_audit_mitigation_actions_task
- [ ] cancel_audit_task
- [ ] cancel_certificate_transfer
- [ ] cancel_job
@ -3375,6 +3457,7 @@
- [ ] create_dynamic_thing_group
- [X] create_job
- [X] create_keys_and_certificate
- [ ] create_mitigation_action
- [ ] create_ota_update
- [X] create_policy
- [ ] create_policy_version
@ -3394,6 +3477,7 @@
- [ ] delete_dynamic_thing_group
- [ ] delete_job
- [ ] delete_job_execution
- [ ] delete_mitigation_action
- [ ] delete_ota_update
- [X] delete_policy
- [ ] delete_policy_version
@ -3409,6 +3493,8 @@
- [ ] delete_v2_logging_level
- [ ] deprecate_thing_type
- [ ] describe_account_audit_configuration
- [ ] describe_audit_finding
- [ ] describe_audit_mitigation_actions_task
- [ ] describe_audit_task
- [ ] describe_authorizer
- [ ] describe_billing_group
@ -3420,6 +3506,7 @@
- [ ] describe_index
- [X] describe_job
- [ ] describe_job_execution
- [ ] describe_mitigation_action
- [ ] describe_role_alias
- [ ] describe_scheduled_audit
- [ ] describe_security_profile
@ -3448,6 +3535,8 @@
- [ ] list_active_violations
- [ ] list_attached_policies
- [ ] list_audit_findings
- [ ] list_audit_mitigation_actions_executions
- [ ] list_audit_mitigation_actions_tasks
- [ ] list_audit_tasks
- [ ] list_authorizers
- [ ] list_billing_groups
@ -3458,6 +3547,7 @@
- [ ] list_job_executions_for_job
- [ ] list_job_executions_for_thing
- [ ] list_jobs
- [ ] list_mitigation_actions
- [ ] list_ota_updates
- [ ] list_outgoing_certificates
- [X] list_policies
@ -3498,6 +3588,7 @@
- [ ] set_logging_options
- [ ] set_v2_logging_level
- [ ] set_v2_logging_options
- [ ] start_audit_mitigation_actions_task
- [ ] start_on_demand_audit_task
- [ ] start_thing_registration_task
- [ ] stop_thing_registration_task
@ -3515,6 +3606,7 @@
- [ ] update_event_configurations
- [ ] update_indexing_configuration
- [ ] update_job
- [ ] update_mitigation_action
- [ ] update_role_alias
- [ ] update_scheduled_audit
- [ ] update_security_profile
@ -3692,6 +3784,7 @@
- [ ] list_tags_for_resource
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_broker_count
- [ ] update_broker_storage
- [ ] update_cluster_configuration
@ -3801,7 +3894,7 @@
- [ ] update_stream
## kms
54% implemented
48% implemented
- [X] cancel_key_deletion
- [ ] connect_custom_key_store
- [ ] create_alias
@ -3821,8 +3914,8 @@
- [X] enable_key_rotation
- [X] encrypt
- [X] generate_data_key
- [X] generate_data_key_without_plaintext
- [X] generate_random
- [ ] generate_data_key_without_plaintext
- [ ] generate_random
- [X] get_key_policy
- [X] get_key_rotation_status
- [ ] get_parameters_for_import
@ -3844,6 +3937,22 @@
- [ ] update_custom_key_store
- [X] update_key_description
## lakeformation
0% implemented
- [ ] batch_grant_permissions
- [ ] batch_revoke_permissions
- [ ] deregister_resource
- [ ] describe_resource
- [ ] get_data_lake_settings
- [ ] get_effective_permissions_for_path
- [ ] grant_permissions
- [ ] list_permissions
- [ ] list_resources
- [ ] put_data_lake_settings
- [ ] register_resource
- [ ] revoke_permissions
- [ ] update_resource
## lambda
0% implemented
- [ ] add_layer_version_permission
@ -3927,8 +4036,11 @@
## lex-runtime
0% implemented
- [ ] delete_session
- [ ] get_session
- [ ] post_content
- [ ] post_text
- [ ] put_session
## license-manager
0% implemented
@ -3972,6 +4084,7 @@
- [ ] create_relational_database
- [ ] create_relational_database_from_snapshot
- [ ] create_relational_database_snapshot
- [ ] delete_auto_snapshot
- [ ] delete_disk
- [ ] delete_disk_snapshot
- [ ] delete_domain
@ -3987,9 +4100,12 @@
- [ ] detach_disk
- [ ] detach_instances_from_load_balancer
- [ ] detach_static_ip
- [ ] disable_add_on
- [ ] download_default_key_pair
- [ ] enable_add_on
- [ ] export_snapshot
- [ ] get_active_names
- [ ] get_auto_snapshots
- [ ] get_blueprints
- [ ] get_bundles
- [ ] get_cloud_formation_stack_records
@ -4053,7 +4169,7 @@
- [ ] update_relational_database_parameters
## logs
28% implemented
35% implemented
- [ ] associate_kms_key
- [ ] cancel_export_task
- [ ] create_export_task
@ -4250,12 +4366,15 @@
## mediapackage
0% implemented
- [ ] create_channel
- [ ] create_harvest_job
- [ ] create_origin_endpoint
- [ ] delete_channel
- [ ] delete_origin_endpoint
- [ ] describe_channel
- [ ] describe_harvest_job
- [ ] describe_origin_endpoint
- [ ] list_channels
- [ ] list_harvest_jobs
- [ ] list_origin_endpoints
- [ ] list_tags_for_resource
- [ ] rotate_channel_credentials
@ -4686,9 +4805,12 @@
0% implemented
- [ ] create_app
- [ ] create_campaign
- [ ] create_email_template
- [ ] create_export_job
- [ ] create_import_job
- [ ] create_push_template
- [ ] create_segment
- [ ] create_sms_template
- [ ] delete_adm_channel
- [ ] delete_apns_channel
- [ ] delete_apns_sandbox_channel
@ -4698,11 +4820,14 @@
- [ ] delete_baidu_channel
- [ ] delete_campaign
- [ ] delete_email_channel
- [ ] delete_email_template
- [ ] delete_endpoint
- [ ] delete_event_stream
- [ ] delete_gcm_channel
- [ ] delete_push_template
- [ ] delete_segment
- [ ] delete_sms_channel
- [ ] delete_sms_template
- [ ] delete_user_endpoints
- [ ] delete_voice_channel
- [ ] get_adm_channel
@ -4723,6 +4848,7 @@
- [ ] get_campaigns
- [ ] get_channels
- [ ] get_email_channel
- [ ] get_email_template
- [ ] get_endpoint
- [ ] get_event_stream
- [ ] get_export_job
@ -4730,6 +4856,7 @@
- [ ] get_gcm_channel
- [ ] get_import_job
- [ ] get_import_jobs
- [ ] get_push_template
- [ ] get_segment
- [ ] get_segment_export_jobs
- [ ] get_segment_import_jobs
@ -4737,9 +4864,11 @@
- [ ] get_segment_versions
- [ ] get_segments
- [ ] get_sms_channel
- [ ] get_sms_template
- [ ] get_user_endpoints
- [ ] get_voice_channel
- [ ] list_tags_for_resource
- [ ] list_templates
- [ ] phone_number_validate
- [ ] put_event_stream
- [ ] put_events
@ -4757,11 +4886,14 @@
- [ ] update_baidu_channel
- [ ] update_campaign
- [ ] update_email_channel
- [ ] update_email_template
- [ ] update_endpoint
- [ ] update_endpoints_batch
- [ ] update_gcm_channel
- [ ] update_push_template
- [ ] update_segment
- [ ] update_sms_channel
- [ ] update_sms_template
- [ ] update_voice_channel
## pinpoint-email
@ -4837,6 +4969,28 @@
- [ ] get_attribute_values
- [ ] get_products
## qldb
0% implemented
- [ ] create_ledger
- [ ] delete_ledger
- [ ] describe_journal_s3_export
- [ ] describe_ledger
- [ ] export_journal_to_s3
- [ ] get_block
- [ ] get_digest
- [ ] get_revision
- [ ] list_journal_s3_exports
- [ ] list_journal_s3_exports_for_ledger
- [ ] list_ledgers
- [ ] list_tags_for_resource
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_ledger
## qldb-session
0% implemented
- [ ] send_command
## quicksight
0% implemented
- [ ] create_group
@ -4868,6 +5022,7 @@
- [ ] get_resource_share_associations
- [ ] get_resource_share_invitations
- [ ] get_resource_shares
- [ ] list_pending_invitation_resources
- [ ] list_principals
- [ ] list_resources
- [ ] reject_resource_share_invitation
@ -4889,6 +5044,7 @@
- [ ] copy_db_parameter_group
- [ ] copy_db_snapshot
- [ ] copy_option_group
- [ ] create_custom_availability_zone
- [ ] create_db_cluster
- [ ] create_db_cluster_endpoint
- [ ] create_db_cluster_parameter_group
@ -4902,6 +5058,7 @@
- [ ] create_event_subscription
- [ ] create_global_cluster
- [ ] create_option_group
- [ ] delete_custom_availability_zone
- [ ] delete_db_cluster
- [ ] delete_db_cluster_endpoint
- [ ] delete_db_cluster_parameter_group
@ -4914,9 +5071,11 @@
- [ ] delete_db_subnet_group
- [ ] delete_event_subscription
- [ ] delete_global_cluster
- [ ] delete_installation_media
- [ ] delete_option_group
- [ ] describe_account_attributes
- [ ] describe_certificates
- [ ] describe_custom_availability_zones
- [ ] describe_db_cluster_backtracks
- [ ] describe_db_cluster_endpoints
- [ ] describe_db_cluster_parameter_groups
@ -4940,6 +5099,7 @@
- [ ] describe_event_subscriptions
- [ ] describe_events
- [ ] describe_global_clusters
- [ ] describe_installation_media
- [ ] describe_option_group_options
- [ ] describe_option_groups
- [ ] describe_orderable_db_instance_options
@ -4950,6 +5110,7 @@
- [ ] describe_valid_db_instance_modifications
- [ ] download_db_log_file_portion
- [ ] failover_db_cluster
- [ ] import_installation_media
- [ ] list_tags_for_resource
- [ ] modify_current_db_cluster_capacity
- [ ] modify_db_cluster
@ -4999,7 +5160,7 @@
- [ ] rollback_transaction
## redshift
32% implemented
31% implemented
- [ ] accept_reserved_node_exchange
- [ ] authorize_cluster_security_group_ingress
- [ ] authorize_snapshot_access
@ -5046,6 +5207,7 @@
- [ ] describe_hsm_client_certificates
- [ ] describe_hsm_configurations
- [ ] describe_logging_status
- [ ] describe_node_configuration_options
- [ ] describe_orderable_cluster_options
- [ ] describe_reserved_node_offerings
- [ ] describe_reserved_nodes
@ -5858,6 +6020,7 @@
- [ ] get_job_manifest
- [ ] get_job_unlock_code
- [ ] get_snowball_usage
- [ ] get_software_updates
- [ ] list_cluster_jobs
- [ ] list_clusters
- [ ] list_compatible_images
@ -5866,7 +6029,7 @@
- [ ] update_job
## sns
58% implemented
57% implemented
- [ ] add_permission
- [ ] check_if_phone_number_is_opted_out
- [ ] confirm_subscription
@ -5886,7 +6049,7 @@
- [X] list_platform_applications
- [X] list_subscriptions
- [ ] list_subscriptions_by_topic
- [x] list_tags_for_resource
- [X] list_tags_for_resource
- [X] list_topics
- [ ] opt_in_phone_number
- [X] publish
@ -5897,12 +6060,12 @@
- [X] set_subscription_attributes
- [ ] set_topic_attributes
- [X] subscribe
- [x] tag_resource
- [X] tag_resource
- [X] unsubscribe
- [x] untag_resource
- [X] untag_resource
## sqs
75% implemented
65% implemented
- [X] add_permission
- [X] change_message_visibility
- [ ] change_message_visibility_batch
@ -5913,13 +6076,13 @@
- [ ] get_queue_attributes
- [ ] get_queue_url
- [X] list_dead_letter_source_queues
- [x] list_queue_tags
- [ ] list_queue_tags
- [X] list_queues
- [X] purge_queue
- [ ] receive_message
- [X] remove_permission
- [X] send_message
- [x] send_message_batch
- [ ] send_message_batch
- [X] set_queue_attributes
- [X] tag_queue
- [X] untag_queue
@ -5976,7 +6139,7 @@
- [ ] describe_maintenance_windows
- [ ] describe_maintenance_windows_for_target
- [ ] describe_ops_items
- [ ] describe_parameters
- [X] describe_parameters
- [ ] describe_patch_baselines
- [ ] describe_patch_group_state
- [ ] describe_patch_groups
@ -6048,7 +6211,7 @@
- [ ] update_service_setting
## stepfunctions
0% implemented
36% implemented
- [ ] create_activity
- [X] create_state_machine
- [ ] delete_activity
@ -6056,13 +6219,13 @@
- [ ] describe_activity
- [X] describe_execution
- [X] describe_state_machine
- [x] describe_state_machine_for_execution
- [ ] describe_state_machine_for_execution
- [ ] get_activity_task
- [ ] get_execution_history
- [ ] list_activities
- [X] list_executions
- [X] list_state_machines
- [X] list_tags_for_resource
- [ ] list_tags_for_resource
- [ ] send_task_failure
- [ ] send_task_heartbeat
- [ ] send_task_success
@ -6541,6 +6704,10 @@
- [ ] update_primary_email_address
- [ ] update_resource
## workmailmessageflow
0% implemented
- [ ] get_raw_message_content
## workspaces
0% implemented
- [ ] associate_ip_groups
@ -6560,6 +6727,7 @@
- [ ] describe_workspace_bundles
- [ ] describe_workspace_directories
- [ ] describe_workspace_images
- [ ] describe_workspace_snapshots
- [ ] describe_workspaces
- [ ] describe_workspaces_connection_status
- [ ] disassociate_ip_groups
@ -6571,6 +6739,7 @@
- [ ] modify_workspace_state
- [ ] reboot_workspaces
- [ ] rebuild_workspaces
- [ ] restore_workspace
- [ ] revoke_ip_rules
- [ ] start_workspaces
- [ ] stop_workspaces

View File

@ -7,6 +7,7 @@ __version__ = '1.3.14.dev'
from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
from .athena import mock_athena # flake8: noqa
from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # flake8: noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa

7
moto/athena/__init__.py Normal file
View File

@ -0,0 +1,7 @@
from __future__ import unicode_literals
from .models import athena_backends
from ..core.models import base_decorator, deprecated_base_decorator

# Default backend instance plus the decorators that test code uses to
# activate the Athena mock (mirrors the layout of moto's other services).
athena_backend = athena_backends['us-east-1']
mock_athena = base_decorator(athena_backends)
mock_athena_deprecated = deprecated_base_decorator(athena_backends)

18
moto/athena/exceptions.py Normal file
View File

@ -0,0 +1,18 @@
from __future__ import unicode_literals
import json
from werkzeug.exceptions import BadRequest
class AthenaClientError(BadRequest):
    """HTTP 400 error raised by the mocked Athena API.

    The response body mimics the JSON error shape emitted by the real
    service: an ``Error`` object (code, message, type) plus a ``RequestId``.
    """

    def __init__(self, code, message):
        super(AthenaClientError, self).__init__()
        error_body = {
            "Error": {
                "Code": code,
                "Message": message,
                "Type": "InvalidRequestException",
            },
            "RequestId": "6876f774-7273-11e4-85dc-39e55ca848d1",
        }
        self.description = json.dumps(error_body)

79
moto/athena/models.py Normal file
View File

@ -0,0 +1,79 @@
from __future__ import unicode_literals
import time
import boto3
from moto.core import BaseBackend, BaseModel
ACCOUNT_ID = 123456789012
class TaggableResourceMixin(object):
    """Adds an ARN plus simple tag create/delete bookkeeping to a resource.

    NOTE: this mixin was copied from Redshift when initially implementing
    Athena. TBD if it's worth the overhead.
    """

    def __init__(self, region_name, resource_name, tags):
        self.region = region_name
        self.resource_name = resource_name
        self.tags = tags or []

    @property
    def arn(self):
        """ARN for this resource in the owning region/account."""
        return "arn:aws:athena:{region}:{account_id}:{resource_name}".format(
            region=self.region,
            account_id=ACCOUNT_ID,
            resource_name=self.resource_name)

    def create_tags(self, tags):
        """Add *tags*, replacing any existing tag that shares a key."""
        incoming_keys = set(tag_set['Key'] for tag_set in tags)
        kept = [tag_set for tag_set in self.tags
                if tag_set['Key'] not in incoming_keys]
        self.tags = kept + list(tags)
        return self.tags

    def delete_tags(self, tag_keys):
        """Remove every tag whose key appears in *tag_keys*."""
        keys_to_drop = set(tag_keys)
        self.tags = [tag_set for tag_set in self.tags
                     if tag_set['Key'] not in keys_to_drop]
        return self.tags
class WorkGroup(TaggableResourceMixin, BaseModel):
    """A single Athena work group, owned by one regional backend."""

    # Static resource metadata; every work group starts out enabled.
    resource_type = 'workgroup'
    state = 'ENABLED'

    def __init__(self, athena_backend, name, configuration, description, tags):
        backend_region = athena_backend.region_name
        self.region_name = backend_region
        super(WorkGroup, self).__init__(
            backend_region, "workgroup/{}".format(name), tags)
        self.athena_backend = athena_backend
        self.name = name
        self.description = description
        self.configuration = configuration
class AthenaBackend(BaseBackend):
    """In-memory mock of the Athena service for a single region."""

    region_name = None

    def __init__(self, region_name=None):
        if region_name is not None:
            self.region_name = region_name
        # Work groups, keyed by their (region-unique) name.
        self.work_groups = {}

    def create_work_group(self, name, configuration, description, tags):
        """Create and store a work group; return None if the name is taken."""
        if name in self.work_groups:
            return None
        new_group = WorkGroup(self, name, configuration, description, tags)
        self.work_groups[name] = new_group
        return new_group

    def list_work_groups(self):
        """Return a summary dict for every work group in this backend."""
        summaries = []
        for work_group in self.work_groups.values():
            summaries.append({
                'Name': work_group.name,
                'State': work_group.state,
                'Description': work_group.description,
                # NOTE(review): creation time is generated per call rather
                # than stored on the work group — presumably fine for a
                # mock, but confirm no test relies on a stable value.
                'CreationTime': time.time(),
            })
        return summaries
# One backend instance per region in which Athena is available.
athena_backends = {
    region: AthenaBackend(region)
    for region in boto3.Session().get_available_regions('athena')
}

35
moto/athena/responses.py Normal file
View File

@ -0,0 +1,35 @@
import json
from moto.core.responses import BaseResponse
from .models import athena_backends
class AthenaResponse(BaseResponse):
    """Maps incoming Athena JSON API calls onto the regional backend."""

    @property
    def athena_backend(self):
        # Backend for the region this request was routed to.
        return athena_backends[self.region]

    def create_work_group(self):
        """Handle CreateWorkGroup; returns a 400 if the name already exists."""
        name = self._get_param('Name')
        description = self._get_param('Description')
        configuration = self._get_param('Configuration')
        tags = self._get_param('Tags')
        created = self.athena_backend.create_work_group(
            name, configuration, description, tags)
        if not created:
            error_body = {
                '__type': 'InvalidRequestException',
                'Message': 'WorkGroup already exists',
            }
            return json.dumps(error_body), dict(status=400)
        success_body = {
            "CreateWorkGroupResponse": {
                "ResponseMetadata": {
                    "RequestId": "384ac68d-3775-11df-8963-01868b7c937a",
                }
            }
        }
        return json.dumps(success_body)

    def list_work_groups(self):
        """Handle ListWorkGroups: return every work group summary."""
        work_groups = self.athena_backend.list_work_groups()
        return json.dumps({"WorkGroups": work_groups})

10
moto/athena/urls.py Normal file
View File

@ -0,0 +1,10 @@
from __future__ import unicode_literals
from .responses import AthenaResponse
# Hostname patterns the Athena mock intercepts.
url_bases = [
    "https?://athena.(.+).amazonaws.com",
]

# All Athena actions are POSTed to the service root.
url_paths = {
    '{0}/$': AthenaResponse.dispatch,
}
}

1
moto/athena/utils.py Normal file
View File

@ -0,0 +1 @@
from __future__ import unicode_literals

View File

@ -117,6 +117,7 @@ class LambdaResponse(BaseResponse):
raise ValueError("Cannot handle {0} request".format(request.method))
def policy(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
return self._get_policy(request, full_url, headers)
if request.method == 'POST':
@ -140,7 +141,7 @@ class LambdaResponse(BaseResponse):
path = request.path if hasattr(request, 'path') else path_url(request.url)
function_name = path.split('/')[-2]
if self.lambda_backend.get_function(function_name):
policy = request.body.decode('utf8')
policy = self.body
self.lambda_backend.add_policy(function_name, policy)
return 200, {}, json.dumps(dict(Statement=policy))
else:

View File

@ -2,6 +2,7 @@ from __future__ import unicode_literals
from moto.acm import acm_backends
from moto.apigateway import apigateway_backends
from moto.athena import athena_backends
from moto.autoscaling import autoscaling_backends
from moto.awslambda import lambda_backends
from moto.cloudformation import cloudformation_backends
@ -35,8 +36,8 @@ from moto.redshift import redshift_backends
from moto.resourcegroups import resourcegroups_backends
from moto.route53 import route53_backends
from moto.s3 import s3_backends
from moto.ses import ses_backends
from moto.secretsmanager import secretsmanager_backends
from moto.ses import ses_backends
from moto.sns import sns_backends
from moto.sqs import sqs_backends
from moto.ssm import ssm_backends
@ -53,6 +54,7 @@ from moto.config import config_backends
BACKENDS = {
'acm': acm_backends,
'apigateway': apigateway_backends,
'athena': athena_backends,
'autoscaling': autoscaling_backends,
'batch': batch_backends,
'cloudformation': cloudformation_backends,

View File

@ -34,14 +34,76 @@ def bytesize(val):
return len(str(val).encode('utf-8'))
def attribute_is_list(attr):
    """
    Check whether *attr* denotes a list element (``name[index]``).

    :param attr: attr or attr[index]
    :return: tuple of (attribute name, index string or None)
    """
    match = re.match(r'(.+)\[([0-9]+)\]', attr)
    if match is None:
        return attr, None
    return match.group(1), match.group(2)
class DynamoType(object):
"""
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
"""
def __init__(self, type_as_dict):
self.type = list(type_as_dict)[0]
self.value = list(type_as_dict.values())[0]
if type(type_as_dict) == DynamoType:
self.type = type_as_dict.type
self.value = type_as_dict.value
else:
self.type = list(type_as_dict)[0]
self.value = list(type_as_dict.values())[0]
if self.is_list():
self.value = [DynamoType(val) for val in self.value]
elif self.is_map():
self.value = dict((k, DynamoType(v)) for k, v in self.value.items())
def set(self, key, new_value, index=None):
if index:
index = int(index)
if type(self.value) is not list:
raise InvalidUpdateExpression
if index >= len(self.value):
self.value.append(new_value)
# {'L': [DynamoType, ..]} ==> DynamoType.set()
self.value[min(index, len(self.value) - 1)].set(key, new_value)
else:
attr = (key or '').split('.').pop(0)
attr, list_index = attribute_is_list(attr)
if not key:
# {'S': value} ==> {'S': new_value}
self.value = new_value.value
else:
if attr not in self.value: # nonexistingattribute
type_of_new_attr = 'M' if '.' in key else new_value.type
self.value[attr] = DynamoType({type_of_new_attr: {}})
# {'M': {'foo': DynamoType}} ==> DynamoType.set(new_value)
self.value[attr].set('.'.join(key.split('.')[1:]), new_value, list_index)
def delete(self, key, index=None):
if index:
if not key:
if int(index) < len(self.value):
del self.value[int(index)]
elif '.' in key:
self.value[int(index)].delete('.'.join(key.split('.')[1:]))
else:
self.value[int(index)].delete(key)
else:
attr = key.split('.')[0]
attr, list_index = attribute_is_list(attr)
if list_index:
self.value[attr].delete('.'.join(key.split('.')[1:]), list_index)
elif '.' in key:
self.value[attr].delete('.'.join(key.split('.')[1:]))
else:
self.value.pop(key)
def __hash__(self):
return hash((self.type, self.value))
@ -98,7 +160,7 @@ class DynamoType(object):
if isinstance(key, int) and self.is_list():
idx = key
if idx >= 0 and idx < len(self.value):
if 0 <= idx < len(self.value):
return DynamoType(self.value[idx])
return None
@ -110,7 +172,7 @@ class DynamoType(object):
sub_type = self.type[0]
value_size = sum([DynamoType({sub_type: v}).size() for v in self.value])
elif self.is_list():
value_size = sum([DynamoType(v).size() for v in self.value])
value_size = sum([v.size() for v in self.value])
elif self.is_map():
value_size = sum([bytesize(k) + DynamoType(v).size() for k, v in self.value.items()])
elif type(self.value) == bool:
@ -162,22 +224,6 @@ class LimitedSizeDict(dict):
raise ItemSizeTooLarge
super(LimitedSizeDict, self).__setitem__(key, value)
def update(self, *args, **kwargs):
if args:
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, "
"got %d" % len(args))
other = dict(args[0])
for key in other:
self[key] = other[key]
for key in kwargs:
self[key] = kwargs[key]
def setdefault(self, key, value=None):
if key not in self:
self[key] = value
return self[key]
class Item(BaseModel):
@ -236,72 +282,26 @@ class Item(BaseModel):
if action == "REMOVE":
key = value
attr, list_index = attribute_is_list(key.split('.')[0])
if '.' not in key:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key)
if list_index_update:
# We need to remove an item from a list (REMOVE listattr[0])
key_attr = self.attrs[list_index_update.group(1)]
list_index = int(list_index_update.group(2))
if key_attr.is_list():
if len(key_attr.value) > list_index:
del key_attr.value[list_index]
if list_index:
new_list = DynamoType(self.attrs[attr])
new_list.delete(None, list_index)
self.attrs[attr] = new_list
else:
self.attrs.pop(value, None)
else:
# Handle nested dict updates
key_parts = key.split('.')
attr = key_parts.pop(0)
if attr not in self.attrs:
raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts[:-1]:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_part)
if list_index_update:
key_part = list_index_update.group(1) # listattr[1] ==> listattr
# Hack but it'll do, traverses into a dict
last_val_type = list(last_val.keys())
if last_val_type and last_val_type[0] == 'M':
last_val = last_val['M']
if key_part not in last_val:
last_val[key_part] = {'M': {}}
last_val = last_val[key_part]
if list_index_update:
last_val = last_val['L'][int(list_index_update.group(2))]
last_val_type = list(last_val.keys())
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_parts[-1])
if list_index_update:
# We need to remove an item from a list (REMOVE attr.listattr[0])
key_part = list_index_update.group(1) # listattr[1] ==> listattr
list_to_update = last_val[key_part]['L']
index_to_remove = int(list_index_update.group(2))
if index_to_remove < len(list_to_update):
del list_to_update[index_to_remove]
else:
if last_val_type and last_val_type[0] == 'M':
last_val['M'].pop(key_parts[-1], None)
else:
last_val.pop(key_parts[-1], None)
self.attrs[attr].delete('.'.join(key.split('.')[1:]))
elif action == 'SET':
key, value = value.split("=", 1)
key = key.strip()
value = value.strip()
# If not exists, changes value to a default if needed, else its the same as it was
if value.startswith('if_not_exists'):
# Function signature
match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value)
if not match:
raise TypeError
path, value = match.groups()
# If it already exists, get its value so we dont overwrite it
if path in self.attrs:
value = self.attrs[path]
# check whether key is a list
attr, list_index = attribute_is_list(key.split('.')[0])
# If value not exists, changes value to a default if needed, else its the same as it was
value = self._get_default(value)
if type(value) != DynamoType:
if value in expression_attribute_values:
@ -311,55 +311,12 @@ class Item(BaseModel):
else:
dyn_value = value
if '.' not in key:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key)
if list_index_update:
key_attr = self.attrs[list_index_update.group(1)]
list_index = int(list_index_update.group(2))
if key_attr.is_list():
if len(key_attr.value) > list_index:
key_attr.value[list_index] = expression_attribute_values[value]
else:
key_attr.value.append(expression_attribute_values[value])
else:
raise InvalidUpdateExpression
else:
self.attrs[key] = dyn_value
if '.' in key and attr not in self.attrs:
raise ValueError # Setting nested attr not allowed if first attr does not exist yet
elif attr not in self.attrs:
self.attrs[attr] = dyn_value # set new top-level attribute
else:
# Handle nested dict updates
key_parts = key.split('.')
attr = key_parts.pop(0)
if attr not in self.attrs:
raise ValueError
last_val = self.attrs[attr].value
for key_part in key_parts:
list_index_update = re.match('(.+)\\[([0-9]+)\\]', key_part)
if list_index_update:
key_part = list_index_update.group(1) # listattr[1] ==> listattr
# Hack but it'll do, traverses into a dict
last_val_type = list(last_val.keys())
if last_val_type and last_val_type[0] == 'M':
last_val = last_val['M']
if key_part not in last_val:
last_val[key_part] = {'M': {}}
last_val = last_val[key_part]
current_type = list(last_val.keys())[0]
if list_index_update:
# We need to add an item to a list
list_index = int(list_index_update.group(2))
if len(last_val['L']) > list_index:
last_val['L'][list_index] = expression_attribute_values[value]
else:
last_val['L'].append(expression_attribute_values[value])
else:
# We have reference to a nested object but we cant just assign to it
if current_type == dyn_value.type:
last_val[current_type] = dyn_value.value
else:
last_val[dyn_value.type] = dyn_value.value
del last_val[current_type]
self.attrs[attr].set('.'.join(key.split('.')[1:]), dyn_value, list_index) # set value recursively
elif action == 'ADD':
key, value = value.split(" ", 1)
@ -413,6 +370,20 @@ class Item(BaseModel):
else:
raise NotImplementedError('{} update action not yet supported'.format(action))
def _get_default(self, value):
if value.startswith('if_not_exists'):
# Function signature
match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value)
if not match:
raise TypeError
path, value = match.groups()
# If it already exists, get its value so we dont overwrite it
if path in self.attrs:
value = self.attrs[path]
return value
def update_with_attribute_updates(self, attribute_updates):
for attribute_name, update_action in attribute_updates.items():
action = update_action['Action']
@ -810,7 +781,6 @@ class Table(BaseModel):
else:
possible_results = [item for item in list(self.all_items()) if isinstance(
item, Item) and item.hash_key == hash_key]
if range_comparison:
if index_name and not index_range_key:
raise ValueError(

View File

@ -763,11 +763,25 @@ class IAMBackend(BaseBackend):
raise IAMNotFoundException("Role {0} not found".format(arn))
def delete_role(self, role_name):
for role in self.get_roles():
if role.name == role_name:
del self.roles[role.id]
return
raise IAMNotFoundException("Role {0} not found".format(role_name))
role = self.get_role(role_name)
for instance_profile in self.get_instance_profiles():
for role in instance_profile.roles:
if role.name == role_name:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must remove roles from instance profile first."
)
if role.managed_policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must detach all policies first."
)
if role.policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must delete policies first."
)
del self.roles[role.id]
def get_roles(self):
return self.roles.values()
@ -1349,10 +1363,18 @@ class IAMBackend(BaseBackend):
return devices, marker
def delete_user(self, user_name):
try:
del self.users[user_name]
except KeyError:
raise IAMNotFoundException("User {0} not found".format(user_name))
user = self.get_user(user_name)
if user.managed_policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must detach all policies first."
)
if user.policies:
raise IAMConflictException(
code="DeleteConflict",
message="Cannot delete entity, must delete policies first."
)
del self.users[user_name]
def report_generated(self):
return self.credential_report

View File

@ -413,7 +413,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
if marker:
result_keys = self._get_results_from_token(result_keys, marker)
result_keys, is_truncated, _ = self._truncate_result(result_keys, max_keys)
result_keys, is_truncated, next_marker = self._truncate_result(result_keys, max_keys)
template = self.response_template(S3_BUCKET_GET_RESPONSE)
return 200, {}, template.render(
@ -423,6 +423,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
result_keys=result_keys,
result_folders=result_folders,
is_truncated=is_truncated,
next_marker=next_marker,
max_keys=max_keys
)
@ -1327,6 +1328,9 @@ S3_BUCKET_GET_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<MaxKeys>{{ max_keys }}</MaxKeys>
<Delimiter>{{ delimiter }}</Delimiter>
<IsTruncated>{{ is_truncated }}</IsTruncated>
{% if next_marker %}
<NextMarker>{{ next_marker }}</NextMarker>
{% endif %}
{% for key in result_keys %}
<Contents>
<Key>{{ key.name }}</Key>

View File

@ -71,16 +71,16 @@ def print_implementation_coverage(coverage):
def write_implementation_coverage_to_file(coverage):
implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
# rewrite the implementation coverage file with updated values
# try deleting the implementation coverage file
try:
os.remove("../IMPLEMENTATION_COVERAGE.md")
os.remove(implementation_coverage_file)
except OSError:
pass
implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
# rewrite the implementation coverage file with updated values
print("Writing to {}".format(implementation_coverage_file))
with open(implementation_coverage_file, "a+") as file:
with open(implementation_coverage_file, "w+") as file:
for service_name in sorted(coverage):
implemented = coverage.get(service_name)['implemented']
not_implemented = coverage.get(service_name)['not_implemented']

View File

@ -94,4 +94,7 @@ setup(
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
project_urls={
"Documentation": "http://docs.getmoto.org/en/latest/",
},
)

View File

@ -1,8 +1,8 @@
-----BEGIN CERTIFICATE-----
MIIEUDCCAjgCCQDfXZHMio+6oDANBgkqhkiG9w0BAQ0FADBjMQswCQYDVQQGEwJH
MIIEUDCCAjgCCQDfXZHMio+6oDANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJH
QjESMBAGA1UECAwJQmVya3NoaXJlMQ8wDQYDVQQHDAZTbG91Z2gxEzARBgNVBAoM
Ck1vdG9TZXJ2ZXIxCzAJBgNVBAsMAlFBMQ0wCwYDVQQDDARNb3RvMB4XDTE3MDky
MTIxMjQ1MFoXDTI3MDkxOTIxMjQ1MFowcTELMAkGA1UEBhMCR0IxEjAQBgNVBAgM
Ck1vdG9TZXJ2ZXIxCzAJBgNVBAsMAlFBMQ0wCwYDVQQDDARNb3RvMB4XDTE5MTAy
MTEzMjczMVoXDTQ5MTIzMTEzMjczNFowcTELMAkGA1UEBhMCR0IxEjAQBgNVBAgM
CUJlcmtzaGlyZTEPMA0GA1UEBwwGU2xvdWdoMRMwEQYDVQQKDApNb3RvU2VydmVy
MRMwEQYDVQQLDApPcGVyYXRpb25zMRMwEQYDVQQDDAoqLm1vdG8uY29tMIIBIjAN
BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzC/oBkzwiIBEceSC/tSD7hkqs8AW
@ -11,16 +11,16 @@ niDXbMgAQE9oxUxtkFESxiNa+EbAMLBFtBkPRvc3iKXh/cfLo7yP8VdqEIDmJCB/
vpjJvf6HnrNJ7keQR+oGJNf7jVaCgOVdJ4lt7+98YDVde7jLx1DN+QbvViJQl60n
K3bmfuLiiw8154Eyi9DOcJE8AB+W7KpPdrmbPisR1EiqY0i0L62ZixN0rPi5hHF+
ozwURL1axcmLjlhIFi8YhBCNcY6ThE7jrqgLIq1n6d8ezRxjDKmqfH1spQIDAQAB
MA0GCSqGSIb3DQEBDQUAA4ICAQCgl/EfjE0Jh3cqQgoOlaFq6L1iJVgy5sYKCC4r
OU4dHgifZ6/grqCJesGiS1Vh4L8XklN++C2aSL73lVtxXoCSopP8Yj0rOGeA6b+7
Fetm4ZQYF61QtahC0L2fkvKXR+uz1I85ndSoMJPT8lbm7sYJuL81Si32NOo6kC6y
4eKzV4KznxdAf6XaQMKtMIyXO3PWTrjm5ayzS6UsmnBvULGDCaAQznFlVFdGNSHx
CaENICR0CBcB+vbL7FPC683a4afceM+aMcMVElWG5q8fxtgbL/aPhzfonhDGWOM4
Rdg8x+yDdi7swxmWlcW5wlP8LpLxN/S3GR9j9IyelxUGmb20yTph3i1K6RM/Fm2W
PI8xdneA6qycUAJo93NfaCuNK7yBfK3uDLqmWlGh3xCG+I1JETLRbxYBWiqeVTb3
qjHMrsgqTqjcaCiKR/5H2eVkdcr8mLxrV5niyBItDl1xGxj4LF8hDLormhaCjiBb
N1cMq5saj/BpoIanlqOWby6uRMYlZvuhwKQGPVWgfuRWKFzGbMWyPCxATbiU89Wb
IykNkT1zTCE/eZwH12T4A7jrBiWq8WNfIST0Z7MReE6Oz+M9Pxx7DyDzSb2Y1RmU
xNYd8CavZLCfns00xZSo+10deMoKVS9GgxSHcS4ELaVaBQwu35emiMJSLcK7iNGE
I4WVSA==
MA0GCSqGSIb3DQEBCwUAA4ICAQAOwvJjY1cLIBVGCDPkkxH4xCP6+QRdm7bqF7X5
DNZ70YcJ27GldrEPmKX8C1RvkC4oCsaytl8Hlw3ZcS1GvwBxTVlnYIE6nLPPi1ix
LvYYgoq+Mjk/2XPCnU/6cqJhb5INskg9s0o15jv27cUIgWVMnj+d5lvSiy1HhdYM
wvuQzXELjhe/rHw1/BFGaBV2vd7einUQwla50UZLcsj6FwWSIsv7EB4GaY/G0XqC
Mai2PltBgBPFqsZo27uBeVfxqMZtwAQlr4iWwWZm1haDy6D4GFCSR8E/gtlyhiN4
MOk1cmr9PSOMB3CWqKjkx7lPMOQT/f+gxlCnupNHsHcZGvQV4mCPiU+lLwp+8z/s
bupQwRvu1SwSUD2rIsVeUuSP3hbMcfhiZA50lenQNApimgrThdPUoFXi07FUdL+F
1QCk6cvA48KzGRo+bPSfZQusj51k/2+hl4sHHZdWg6mGAIY9InMKmPDE4VzM8hro
fr2fJLqKQ4h+xKbEYnvPEPttUdJbvUgr9TKKVw+m3lmW9SktzE5KtvWvN6daTj9Z
oHDJkOyko3uyTzk+HwWDC/pQ2cC+iF1MjIHi72U9ibObSODg/d9cMH3XJTnZ9W3+
He9iuH4dJpKnVjnJ5NKt7IOrPHID77160hpwF1dim22ZRp508eYapRzgawPMpCcd
a6YipQ==
-----END CERTIFICATE-----

View File

@ -0,0 +1,59 @@
from __future__ import unicode_literals
import datetime
from botocore.exceptions import ClientError
import boto3
import sure # noqa
from moto import mock_athena
@mock_athena
def test_create_work_group():
    """create_work_group rejects duplicate names, and the created group
    must then appear in list_work_groups."""
    client = boto3.client('athena', region_name='us-east-1')

    client.create_work_group(
        Name='athena_workgroup',
        Description='Test work group',
        Configuration={
            'ResultConfiguration': {
                'OutputLocation': 's3://bucket-name/prefix/',
                'EncryptionConfiguration': {
                    'EncryptionOption': 'SSE_KMS',
                    'KmsKey': 'aws:arn:kms:1233456789:us-east-1:key/number-1',
                },
            },
        },
        Tags=[],
    )

    try:
        # The second time should throw an error
        client.create_work_group(
            Name='athena_workgroup',
            Description='duplicate',
            Configuration={
                'ResultConfiguration': {
                    'OutputLocation': 's3://bucket-name/prefix/',
                    'EncryptionConfiguration': {
                        'EncryptionOption': 'SSE_KMS',
                        'KmsKey': 'aws:arn:kms:1233456789:us-east-1:key/number-1',
                    },
                },
            },
        )
    except ClientError as err:
        err.response['Error']['Code'].should.equal('InvalidRequestException')
        err.response['Error']['Message'].should.equal('WorkGroup already exists')
    else:
        # Fixed message: the expected error is InvalidRequestException, not
        # ResourceNotFoundException as the old message claimed.
        raise RuntimeError('Should have raised InvalidRequestException')

    # Then test the work group appears in the work group list
    response = client.list_work_groups()
    response['WorkGroups'].should.have.length_of(1)
    work_group = response['WorkGroups'][0]
    work_group['Name'].should.equal('athena_workgroup')
    work_group['Description'].should.equal('Test work group')
    work_group['State'].should.equal('ENABLED')

View File

@ -769,10 +769,10 @@ def test_get_function_created_with_zipfile():
@mock_lambda
def add_function_permission():
def test_add_function_permission():
conn = boto3.client('lambda', 'us-west-2')
zip_content = get_test_zip_file1()
result = conn.create_function(
conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
@ -796,16 +796,16 @@ def add_function_permission():
EventSourceToken='blah',
Qualifier='2'
)
assert 'Statement' in response
res = json.loads(response['Statement'])
assert res['Action'] == "lambda:InvokeFunction"
assert u'Statement' in response
res = json.loads(response[u'Statement'])
assert res[u'Action'] == u'lambda:InvokeFunction'
@mock_lambda
def get_function_policy():
def test_get_function_policy():
conn = boto3.client('lambda', 'us-west-2')
zip_content = get_test_zip_file1()
result = conn.create_function(
conn.create_function(
FunctionName='testFunction',
Runtime='python2.7',
Role='test-iam-role',
@ -834,10 +834,9 @@ def get_function_policy():
FunctionName='testFunction'
)
assert 'Policy' in response
assert isinstance(response['Policy'], str)
res = json.loads(response['Policy'])
assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction'
assert u'Policy' in response
res = json.loads(response[u'Policy'])
assert res[u'Statement'][0][u'Action'] == u'lambda:InvokeFunction'
@mock_lambda

View File

@ -2161,20 +2161,11 @@ def test_condition_expression__attr_doesnt_exist():
client.create_table(
TableName='test',
KeySchema=[{'AttributeName': 'forum_name', 'KeyType': 'HASH'}],
AttributeDefinitions=[
{'AttributeName': 'forum_name', 'AttributeType': 'S'},
],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
)
client.put_item(
TableName='test',
Item={
'forum_name': {'S': 'foo'},
'ttl': {'N': 'bar'},
}
)
AttributeDefinitions=[{'AttributeName': 'forum_name', 'AttributeType': 'S'}],
ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1})
client.put_item(TableName='test',
Item={'forum_name': {'S': 'foo'}, 'ttl': {'N': 'bar'}})
def update_if_attr_doesnt_exist():
# Test nonexistent top-level attribute.
@ -2261,6 +2252,7 @@ def test_condition_expression__and_order():
}
)
@mock_dynamodb2
def test_query_gsi_with_range_key():
dynamodb = boto3.client('dynamodb', region_name='us-east-1')
@ -2510,13 +2502,15 @@ def test_index_with_unknown_attributes_should_fail():
def test_update_list_index__set_existing_index():
table_name = 'test_list_index_access'
client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}},
UpdateExpression='set itemlist[1]=:Item',
ExpressionAttributeValues={':Item': {'S': 'bar2_update'}})
#
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
assert result['id'] == {'S': 'foo'}
assert result['itemlist'] == {'L': [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}]}
result['id'].should.equal({'S': 'foo'})
result['itemlist'].should.equal({'L': [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}]})
@mock_dynamodb2
@ -2530,14 +2524,16 @@ def test_update_list_index__set_existing_nested_index():
ExpressionAttributeValues={':Item': {'S': 'bar2_update'}})
#
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
assert result['id'] == {'S': 'foo2'}
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}]
result['id'].should.equal({'S': 'foo2'})
result['itemmap']['M']['itemlist']['L'].should.equal([{'S': 'bar1'}, {'S': 'bar2_update'}, {'S': 'bar3'}])
@mock_dynamodb2
def test_update_list_index__set_index_out_of_range():
table_name = 'test_list_index_access'
client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}},
UpdateExpression='set itemlist[10]=:Item',
ExpressionAttributeValues={':Item': {'S': 'bar10'}})
@ -2562,6 +2558,25 @@ def test_update_list_index__set_nested_index_out_of_range():
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}, {'S': 'bar10'}]
@mock_dynamodb2
def test_update_list_index__set_double_nested_index():
    """SET on a path that crosses a map, a list index, and a nested map key."""
    table_name = 'test_list_index_access'
    client = create_table_with_list(table_name)
    client.put_item(TableName=table_name,
                    Item={'id': {'S': 'foo2'},
                          'itemmap': {'M': {'itemlist': {'L': [{'M': {'foo': {'S': 'bar11'}, 'foos': {'S': 'bar12'}}},
                                                               {'M': {'foo': {'S': 'bar21'}, 'foos': {'S': 'bar21'}}}]}}}})
    client.update_item(TableName=table_name, Key={'id': {'S': 'foo2'}},
                       UpdateExpression='set itemmap.itemlist[1].foos=:Item',
                       ExpressionAttributeValues={':Item': {'S': 'bar22'}})
    #
    result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
    # Consistent should.equal style, matching the surrounding tests.
    result['id'].should.equal({'S': 'foo2'})
    len(result['itemmap']['M']['itemlist']['L']).should.equal(2)
    result['itemmap']['M']['itemlist']['L'][0].should.equal({'M': {'foo': {'S': 'bar11'}, 'foos': {'S': 'bar12'}}})  # unchanged
    result['itemmap']['M']['itemlist']['L'][1].should.equal({'M': {'foo': {'S': 'bar21'}, 'foos': {'S': 'bar22'}}})  # updated
@mock_dynamodb2
def test_update_list_index__set_index_of_a_string():
table_name = 'test_list_index_access'
@ -2578,15 +2593,29 @@ def test_update_list_index__set_index_of_a_string():
'The document path provided in the update expression is invalid for update')
@mock_dynamodb2
def test_remove_top_level_attribute():
    """REMOVE of a plain (non-nested) attribute deletes it from the item."""
    table_name = 'test_remove'
    client = create_table_with_list(table_name)
    client.put_item(
        TableName=table_name,
        Item={'id': {'S': 'foo'}, 'item': {'S': 'bar'}},
    )
    client.update_item(
        TableName=table_name,
        Key={'id': {'S': 'foo'}},
        UpdateExpression='REMOVE item',
    )
    stored = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
    stored.should.equal({'id': {'S': 'foo'}})
@mock_dynamodb2
def test_remove_list_index__remove_existing_index():
    """REMOVE itemlist[1] drops exactly the middle element of the list."""
    table_name = 'test_list_index_access'
    client = create_table_with_list(table_name)
    client.put_item(TableName=table_name,
                    Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
    client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[1]')
    #
    result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
    # Merge residue removed: the same conditions were previously asserted twice
    # (old bare asserts + new should.equal). Keep only the should.equal style.
    result['id'].should.equal({'S': 'foo'})
    result['itemlist'].should.equal({'L': [{'S': 'bar1'}, {'S': 'bar3'}]})
@mock_dynamodb2
@ -2598,8 +2627,8 @@ def test_remove_list_index__remove_existing_nested_index():
client.update_item(TableName=table_name, Key={'id': {'S': 'foo2'}}, UpdateExpression='REMOVE itemmap.itemlist[1]')
#
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo2'}})['Item']
assert result['id'] == {'S': 'foo2'}
assert result['itemmap']['M']['itemlist']['L'] == [{'S': 'bar1'}]
result['id'].should.equal({'S': 'foo2'})
result['itemmap']['M']['itemlist']['L'].should.equal([{'S': 'bar1'}])
@mock_dynamodb2
@ -2626,6 +2655,8 @@ def test_remove_list_index__remove_existing_double_nested_index():
def test_remove_list_index__remove_index_out_of_range():
table_name = 'test_list_index_access'
client = create_table_with_list(table_name)
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
client.update_item(TableName=table_name, Key={'id': {'S': 'foo'}}, UpdateExpression='REMOVE itemlist[10]')
#
result = client.get_item(TableName=table_name, Key={'id': {'S': 'foo'}})['Item']
@ -2639,8 +2670,6 @@ def create_table_with_list(table_name):
KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
BillingMode='PAY_PER_REQUEST')
client.put_item(TableName=table_name,
Item={'id': {'S': 'foo'}, 'itemlist': {'L': [{'S': 'bar1'}, {'S': 'bar2'}, {'S': 'bar3'}]}})
return client

View File

@ -214,16 +214,46 @@ def test_update_login_profile():
def test_delete_role():
    """delete_role: NoSuchEntity on a missing role, DeleteConflict while a
    managed policy, inline policy, or instance-profile attachment remains,
    and a clean delete once conflicts are removed."""
    conn = boto3.client('iam', region_name='us-east-1')

    # Merge residue removed: a leftover `with assert_raises(ClientError):`
    # line from the old test preceded this block.
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.delete_role(RoleName="my-role")

    # Test deletion failure with a managed policy
    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
    role = conn.get_role(RoleName="my-role")
    role.get('Role').get('Arn').should.equal('arn:aws:iam::123456789012:role/my-path/my-role')
    response = conn.create_policy(PolicyName="my-managed-policy", PolicyDocument=MOCK_POLICY)
    conn.attach_role_policy(PolicyArn=response['Policy']['Arn'], RoleName="my-role")
    with assert_raises(conn.exceptions.DeleteConflictException):
        conn.delete_role(RoleName="my-role")
    conn.detach_role_policy(PolicyArn=response['Policy']['Arn'], RoleName="my-role")
    conn.delete_policy(PolicyArn=response['Policy']['Arn'])
    conn.delete_role(RoleName="my-role")
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_role(RoleName="my-role")

    # Test deletion failure with an inline policy
    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
    conn.put_role_policy(RoleName="my-role", PolicyName="my-role-policy", PolicyDocument=MOCK_POLICY)
    with assert_raises(conn.exceptions.DeleteConflictException):
        conn.delete_role(RoleName="my-role")
    conn.delete_role_policy(RoleName="my-role", PolicyName="my-role-policy")
    conn.delete_role(RoleName="my-role")
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_role(RoleName="my-role")

    # Test deletion failure with attachment to an instance profile
    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
    conn.create_instance_profile(InstanceProfileName="my-profile")
    conn.add_role_to_instance_profile(InstanceProfileName="my-profile", RoleName="my-role")
    with assert_raises(conn.exceptions.DeleteConflictException):
        conn.delete_role(RoleName="my-role")
    conn.remove_role_from_instance_profile(InstanceProfileName="my-profile", RoleName="my-role")
    conn.delete_role(RoleName="my-role")
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_role(RoleName="my-role")

    # Test deletion with no conflicts
    conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
    conn.delete_role(RoleName="my-role")
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_role(RoleName="my-role")
@ -992,12 +1022,40 @@ def test_delete_user_deprecated():
@mock_iam()
def test_delete_user():
    """delete_user: NoSuchEntity on a missing user, DeleteConflict while a
    managed or inline policy remains, and a clean delete otherwise."""
    conn = boto3.client('iam', region_name='us-east-1')

    # Merge residue removed: a leftover `with assert_raises(ClientError):`
    # line from the old test preceded this block.
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.delete_user(UserName='my-user')

    # Test deletion failure with a managed policy
    conn.create_user(UserName='my-user')
    [user['UserName'] for user in conn.list_users()['Users']].should.equal(['my-user'])
    response = conn.create_policy(PolicyName="my-managed-policy", PolicyDocument=MOCK_POLICY)
    conn.attach_user_policy(PolicyArn=response['Policy']['Arn'], UserName="my-user")
    with assert_raises(conn.exceptions.DeleteConflictException):
        conn.delete_user(UserName='my-user')
    conn.detach_user_policy(PolicyArn=response['Policy']['Arn'], UserName="my-user")
    conn.delete_policy(PolicyArn=response['Policy']['Arn'])
    conn.delete_user(UserName='my-user')
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_user(UserName='my-user')

    # Test deletion failure with an inline policy
    conn.create_user(UserName='my-user')
    conn.put_user_policy(
        UserName='my-user',
        PolicyName='my-user-policy',
        PolicyDocument=MOCK_POLICY
    )
    with assert_raises(conn.exceptions.DeleteConflictException):
        conn.delete_user(UserName='my-user')
    conn.delete_user_policy(UserName='my-user', PolicyName='my-user-policy')
    conn.delete_user(UserName='my-user')
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_user(UserName='my-user')

    # Test deletion with no conflicts
    conn.create_user(UserName='my-user')
    conn.delete_user(UserName='my-user')
    with assert_raises(conn.exceptions.NoSuchEntityException):
        conn.get_user(UserName='my-user')
@mock_iam_deprecated()

View File

@ -1247,6 +1247,54 @@ def test_website_redirect_location():
resp['WebsiteRedirectLocation'].should.equal(url)
@mock_s3
def test_boto3_list_objects_truncated_response():
    """Paginating list_objects with MaxKeys=1 yields one key per page,
    in lexicographic order ('one' < 'three' < 'two'), with NextMarker set
    on every page except the last."""
    s3 = boto3.client('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='mybucket')
    s3.put_object(Bucket='mybucket', Key='one', Body=b'1')
    s3.put_object(Bucket='mybucket', Key='two', Body=b'22')
    s3.put_object(Bucket='mybucket', Key='three', Body=b'333')

    # First list
    resp = s3.list_objects(Bucket='mybucket', MaxKeys=1)
    listed_object = resp['Contents'][0]

    assert listed_object['Key'] == 'one'
    assert resp['MaxKeys'] == 1
    # Fixed E712: compare booleans with `is`, not `==`.
    assert resp['IsTruncated'] is True
    # moto renders an absent Prefix/Delimiter as the literal string 'None'.
    assert resp['Prefix'] == 'None'
    assert resp['Delimiter'] == 'None'
    assert 'NextMarker' in resp

    next_marker = resp["NextMarker"]

    # Second list
    resp = s3.list_objects(
        Bucket='mybucket', MaxKeys=1, Marker=next_marker)
    listed_object = resp['Contents'][0]

    assert listed_object['Key'] == 'three'
    assert resp['MaxKeys'] == 1
    assert resp['IsTruncated'] is True
    assert resp['Prefix'] == 'None'
    assert resp['Delimiter'] == 'None'
    assert 'NextMarker' in resp

    next_marker = resp["NextMarker"]

    # Third list
    resp = s3.list_objects(
        Bucket='mybucket', MaxKeys=1, Marker=next_marker)
    listed_object = resp['Contents'][0]

    assert listed_object['Key'] == 'two'
    assert resp['MaxKeys'] == 1
    assert resp['IsTruncated'] is False
    assert resp['Prefix'] == 'None'
    assert resp['Delimiter'] == 'None'
    assert 'NextMarker' not in resp
@mock_s3
def test_boto3_list_keys_xml_escaped():
s3 = boto3.client('s3', region_name='us-east-1')