commit 304be5c7ee

@@ -50,6 +50,41 @@ Note the `urls.py` that redirects all incoming URL requests to a generic `dispatch` method

If you want more control over incoming requests or their bodies, it is possible to redirect specific requests to a custom method. See this PR for an example: https://github.com/spulec/moto/pull/2957/files
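
As a hedged illustration (the service and handler names below are invented, not taken from that PR), such a mapping in a service's `urls.py` might look like this:

```python
# moto/myservice/urls.py -- a minimal sketch; "myservice" and "custom_method"
# are illustrative names, following moto's existing url_paths convention.
from .responses import MyServiceResponse

url_bases = ["https?://myservice\\.(.+)\\.amazonaws\\.com"]

# an instance is needed so that a specific path can be bound to one method
response_instance = MyServiceResponse()

url_paths = {
    # every other request goes through the generic dispatch method
    "{0}/$": MyServiceResponse.dispatch,
    # this specific path is handled by a dedicated method instead
    "{0}/custom-path$": response_instance.custom_method,
}
```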

### Generating template code for services

By using `scripts/scaffold.py`, you can automatically generate template code for new services and for new methods of existing services. The script looks up the API specification of the given boto3 method and adds the necessary code, including request and response parameters. In some cases it may fail to generate the code.

Please try it out by running `python scripts/scaffold.py`:

```bash
$ python scripts/scaffold.py
Select service: codedeploy

==Current Implementation Status==
[ ] add_tags_to_on_premises_instances
...
[ ] create_deployment
...
[ ] update_deployment_group
=================================
Select Operation: create_deployment


Initializing service codedeploy
creating moto/codedeploy
creating moto/codedeploy/models.py
creating moto/codedeploy/exceptions.py
creating moto/codedeploy/__init__.py
creating moto/codedeploy/responses.py
creating moto/codedeploy/urls.py
creating tests/test_codedeploy
creating tests/test_codedeploy/test_server.py
creating tests/test_codedeploy/test_codedeploy.py
inserting code moto/codedeploy/responses.py
inserting code moto/codedeploy/models.py
You will still need to add the mock into "__init__.py"
```
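
As the last line of the scaffold output notes, the generated mock must still be registered by hand. Following the `lazy_load` pattern already used in `moto/__init__.py` (the codedeploy names mirror the example run above):

```python
# moto/__init__.py -- registration sketch for the newly scaffolded service
mock_codedeploy = lazy_load(".codedeploy", "mock_codedeploy")
```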

## Maintainers

### Releasing a new version of Moto

@@ -464,13 +464,13 @@
- [ ] delete_scaling_policy
- [ ] delete_scheduled_action
- [ ] deregister_scalable_target
- [x] describe_scalable_targets
- [X] describe_scalable_targets
- [ ] describe_scaling_activities
- [ ] describe_scaling_policies
- [ ] describe_scheduled_actions
- [ ] put_scaling_policy
- [ ] put_scheduled_action
- [x] register_scalable_target - includes enhanced validation support for ECS targets
- [X] register_scalable_target
</details>

## application-insights

@@ -642,15 +642,15 @@

## athena
<details>
<summary>26% implemented</summary>
<summary>36% implemented</summary>

- [ ] batch_get_named_query
- [ ] batch_get_query_execution
- [ ] create_named_query
- [X] create_named_query
- [X] create_work_group
- [ ] delete_named_query
- [ ] delete_work_group
- [ ] get_named_query
- [X] get_named_query
- [ ] get_query_execution
- [ ] get_query_results
- [X] get_work_group

@@ -1664,7 +1664,7 @@

## cognito-idp
<details>
<summary>38% implemented</summary>
<summary>44% implemented</summary>

- [ ] add_custom_attributes
- [X] admin_add_user_to_group

@@ -1693,11 +1693,11 @@
- [ ] admin_update_device_status
- [X] admin_update_user_attributes
- [ ] admin_user_global_sign_out
- [ ] associate_software_token
- [X] associate_software_token
- [X] change_password
- [ ] confirm_device
- [X] confirm_forgot_password
- [ ] confirm_sign_up
- [X] confirm_sign_up
- [X] create_group
- [X] create_identity_provider
- [X] create_resource_server

@@ -1732,7 +1732,7 @@
- [ ] get_user_attribute_verification_code
- [ ] get_user_pool_mfa_config
- [ ] global_sign_out
- [ ] initiate_auth
- [X] initiate_auth
- [ ] list_devices
- [X] list_groups
- [X] list_identity_providers

@@ -1747,10 +1747,10 @@
- [X] respond_to_auth_challenge
- [ ] set_risk_configuration
- [ ] set_ui_customization
- [ ] set_user_mfa_preference
- [X] set_user_mfa_preference
- [ ] set_user_pool_mfa_config
- [ ] set_user_settings
- [ ] sign_up
- [X] sign_up
- [ ] start_user_import_job
- [ ] stop_user_import_job
- [ ] tag_resource

@@ -1764,7 +1764,7 @@
- [ ] update_user_pool
- [X] update_user_pool_client
- [X] update_user_pool_domain
- [ ] verify_software_token
- [X] verify_software_token
- [ ] verify_user_attribute
</details>

@@ -1889,7 +1889,7 @@

## config
<details>
<summary>26% implemented</summary>
<summary>32% implemented</summary>

- [X] batch_get_aggregate_resource_config
- [X] batch_get_resource_config

@@ -1901,7 +1901,7 @@
- [X] delete_delivery_channel
- [ ] delete_evaluation_results
- [ ] delete_organization_config_rule
- [ ] delete_organization_conformance_pack
- [X] delete_organization_conformance_pack
- [ ] delete_pending_aggregation_request
- [ ] delete_remediation_configuration
- [ ] delete_remediation_exceptions

@@ -1925,8 +1925,8 @@
- [X] describe_delivery_channels
- [ ] describe_organization_config_rule_statuses
- [ ] describe_organization_config_rules
- [ ] describe_organization_conformance_pack_statuses
- [ ] describe_organization_conformance_packs
- [X] describe_organization_conformance_pack_statuses
- [X] describe_organization_conformance_packs
- [ ] describe_pending_aggregation_requests
- [ ] describe_remediation_configurations
- [ ] describe_remediation_exceptions

@@ -1944,7 +1944,7 @@
- [ ] get_conformance_pack_compliance_summary
- [ ] get_discovered_resource_counts
- [ ] get_organization_config_rule_detailed_status
- [ ] get_organization_conformance_pack_detailed_status
- [X] get_organization_conformance_pack_detailed_status
- [X] get_resource_config_history
- [X] list_aggregate_discovered_resources
- [X] list_discovered_resources

@@ -1957,7 +1957,7 @@
- [X] put_delivery_channel
- [X] put_evaluations
- [ ] put_organization_config_rule
- [ ] put_organization_conformance_pack
- [X] put_organization_conformance_pack
- [ ] put_remediation_configurations
- [ ] put_remediation_exceptions
- [ ] put_resource_config

@@ -2580,7 +2580,7 @@

## ec2
<details>
<summary>26% implemented</summary>
<summary>27% implemented</summary>

- [ ] accept_reserved_instances_exchange_quote
- [ ] accept_transit_gateway_peering_attachment

@@ -2639,7 +2639,7 @@
- [X] create_internet_gateway
- [X] create_key_pair
- [X] create_launch_template
- [x] create_launch_template_version
- [ ] create_launch_template_version
- [ ] create_local_gateway_route
- [ ] create_local_gateway_route_table_vpc_association
- [X] create_nat_gateway

@@ -2939,7 +2939,7 @@
- [ ] purchase_reserved_instances_offering
- [ ] purchase_scheduled_instances
- [X] reboot_instances
- [ ] register_image
- [X] register_image
- [ ] register_instance_event_notification_attributes
- [ ] register_transit_gateway_multicast_group_members
- [ ] register_transit_gateway_multicast_group_sources

@@ -3031,7 +3031,7 @@

## ecs
<details>
<summary>73% implemented</summary>
<summary>72% implemented</summary>

- [ ] create_capacity_provider
- [X] create_cluster

@@ -4118,7 +4118,7 @@

## iam
<details>
<summary>69% implemented</summary>
<summary>70% implemented</summary>

- [ ] add_client_id_to_open_id_connect_provider
- [X] add_role_to_instance_profile

@@ -4146,7 +4146,7 @@
- [X] delete_account_alias
- [X] delete_account_password_policy
- [X] delete_group
- [ ] delete_group_policy
- [X] delete_group_policy
- [X] delete_instance_profile
- [X] delete_login_profile
- [X] delete_open_id_connect_provider

@@ -4367,7 +4367,7 @@

## iot
<details>
<summary>27% implemented</summary>
<summary>28% implemented</summary>

- [ ] accept_certificate_transfer
- [ ] add_thing_to_billing_group

@@ -4447,7 +4447,7 @@
- [ ] describe_default_authorizer
- [ ] describe_dimension
- [ ] describe_domain_configuration
- [ ] describe_endpoint
- [X] describe_endpoint
- [ ] describe_event_configurations
- [ ] describe_index
- [X] describe_job

@@ -4533,7 +4533,7 @@
- [ ] list_violation_events
- [ ] register_ca_certificate
- [X] register_certificate
- [ ] register_certificate_without_ca
- [X] register_certificate_without_ca
- [ ] register_thing
- [ ] reject_certificate_transfer
- [ ] remove_thing_from_billing_group

@@ -4837,7 +4837,6 @@
- [ ] describe_configuration
- [ ] describe_configuration_revision
- [ ] get_bootstrap_brokers
- [ ] get_compatible_kafka_versions
- [ ] list_cluster_operations
- [ ] list_clusters
- [ ] list_configuration_revisions

@@ -4850,7 +4849,6 @@
- [ ] update_broker_count
- [ ] update_broker_storage
- [ ] update_cluster_configuration
- [ ] update_cluster_kafka_version
- [ ] update_monitoring
</details>

@@ -4920,11 +4918,11 @@

## kinesis-video-archived-media
<details>
<summary>0% implemented</summary>
<summary>60% implemented</summary>

- [ ] get_clip
- [ ] get_dash_streaming_session_url
- [ ] get_hls_streaming_session_url
- [X] get_clip
- [X] get_dash_streaming_session_url
- [X] get_hls_streaming_session_url
- [ ] get_media_for_fragment_list
- [ ] list_fragments
</details>
@@ -5004,18 +5002,18 @@

## kinesisvideo
<details>
<summary>0% implemented</summary>
<summary>26% implemented</summary>

- [ ] create_signaling_channel
- [ ] create_stream
- [X] create_stream
- [ ] delete_signaling_channel
- [ ] delete_stream
- [X] delete_stream
- [ ] describe_signaling_channel
- [ ] describe_stream
- [ ] get_data_endpoint
- [X] describe_stream
- [X] get_data_endpoint
- [ ] get_signaling_channel_endpoint
- [ ] list_signaling_channels
- [ ] list_streams
- [X] list_streams
- [ ] list_tags_for_resource
- [ ] list_tags_for_stream
- [ ] tag_resource

@@ -5100,7 +5098,7 @@

## lambda
<details>
<summary>38% implemented</summary>
<summary>44% implemented</summary>

- [ ] add_layer_version_permission
- [X] add_permission

@@ -6100,7 +6098,7 @@

## organizations
<details>
<summary>47% implemented</summary>
<summary>68% implemented</summary>

- [ ] accept_handshake
- [X] attach_policy

@@ -6114,7 +6112,7 @@
- [ ] delete_organization
- [ ] delete_organizational_unit
- [X] delete_policy
- [ ] deregister_delegated_administrator
- [X] deregister_delegated_administrator
- [X] describe_account
- [X] describe_create_account_status
- [ ] describe_effective_policy

@@ -6123,20 +6121,20 @@
- [X] describe_organizational_unit
- [X] describe_policy
- [ ] detach_policy
- [ ] disable_aws_service_access
- [ ] disable_policy_type
- [X] disable_aws_service_access
- [X] disable_policy_type
- [ ] enable_all_features
- [ ] enable_aws_service_access
- [ ] enable_policy_type
- [X] enable_aws_service_access
- [X] enable_policy_type
- [ ] invite_account_to_organization
- [ ] leave_organization
- [X] list_accounts
- [X] list_accounts_for_parent
- [ ] list_aws_service_access_for_organization
- [X] list_aws_service_access_for_organization
- [X] list_children
- [ ] list_create_account_status
- [ ] list_delegated_administrators
- [ ] list_delegated_services_for_account
- [X] list_delegated_administrators
- [X] list_delegated_services_for_account
- [ ] list_handshakes_for_account
- [ ] list_handshakes_for_organization
- [X] list_organizational_units_for_parent

@@ -6147,7 +6145,7 @@
- [X] list_tags_for_resource
- [X] list_targets_for_policy
- [X] move_account
- [ ] register_delegated_administrator
- [X] register_delegated_administrator
- [ ] remove_account_from_organization
- [X] tag_resource
- [X] untag_resource

@@ -6545,21 +6543,21 @@

## ram
<details>
<summary>0% implemented</summary>
<summary>20% implemented</summary>

- [ ] accept_resource_share_invitation
- [ ] associate_resource_share
- [ ] associate_resource_share_permission
- [ ] create_resource_share
- [ ] delete_resource_share
- [X] create_resource_share
- [X] delete_resource_share
- [ ] disassociate_resource_share
- [ ] disassociate_resource_share_permission
- [ ] enable_sharing_with_aws_organization
- [X] enable_sharing_with_aws_organization
- [ ] get_permission
- [ ] get_resource_policies
- [ ] get_resource_share_associations
- [ ] get_resource_share_invitations
- [ ] get_resource_shares
- [X] get_resource_shares
- [ ] list_pending_invitation_resources
- [ ] list_permissions
- [ ] list_principals

@@ -6570,7 +6568,7 @@
- [ ] reject_resource_share_invitation
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_resource_share
- [X] update_resource_share
</details>

## rds

@@ -7074,7 +7072,7 @@

## s3
<details>
<summary>25% implemented</summary>
<summary>26% implemented</summary>

- [ ] abort_multipart_upload
- [ ] complete_multipart_upload

@@ -7093,7 +7091,7 @@
- [X] delete_bucket_tagging
- [ ] delete_bucket_website
- [X] delete_object
- [x] delete_object_tagging
- [X] delete_object_tagging
- [ ] delete_objects
- [ ] delete_public_access_block
- [ ] get_bucket_accelerate_configuration

@@ -7193,7 +7191,7 @@

## sagemaker
<details>
<summary>0% implemented</summary>
<summary>12% implemented</summary>

- [ ] add_tags
- [ ] associate_trial_component

@@ -7203,22 +7201,22 @@
- [ ] create_code_repository
- [ ] create_compilation_job
- [ ] create_domain
- [ ] create_endpoint
- [ ] create_endpoint_config
- [X] create_endpoint
- [X] create_endpoint_config
- [ ] create_experiment
- [ ] create_flow_definition
- [ ] create_human_task_ui
- [ ] create_hyper_parameter_tuning_job
- [ ] create_labeling_job
- [ ] create_model
- [X] create_model
- [ ] create_model_package
- [ ] create_monitoring_schedule
- [ ] create_notebook_instance
- [X] create_notebook_instance
- [ ] create_notebook_instance_lifecycle_config
- [ ] create_presigned_domain_url
- [ ] create_presigned_notebook_instance_url
- [ ] create_processing_job
- [ ] create_training_job
- [X] create_training_job
- [ ] create_transform_job
- [ ] create_trial
- [ ] create_trial_component

@@ -7228,14 +7226,14 @@
- [ ] delete_app
- [ ] delete_code_repository
- [ ] delete_domain
- [ ] delete_endpoint
- [ ] delete_endpoint_config
- [X] delete_endpoint
- [X] delete_endpoint_config
- [ ] delete_experiment
- [ ] delete_flow_definition
- [ ] delete_model
- [X] delete_model
- [ ] delete_model_package
- [ ] delete_monitoring_schedule
- [ ] delete_notebook_instance
- [X] delete_notebook_instance
- [ ] delete_notebook_instance_lifecycle_config
- [ ] delete_tags
- [ ] delete_trial

@@ -7248,21 +7246,21 @@
- [ ] describe_code_repository
- [ ] describe_compilation_job
- [ ] describe_domain
- [ ] describe_endpoint
- [ ] describe_endpoint_config
- [X] describe_endpoint
- [X] describe_endpoint_config
- [ ] describe_experiment
- [ ] describe_flow_definition
- [ ] describe_human_task_ui
- [ ] describe_hyper_parameter_tuning_job
- [ ] describe_labeling_job
- [ ] describe_model
- [X] describe_model
- [ ] describe_model_package
- [ ] describe_monitoring_schedule
- [ ] describe_notebook_instance
- [ ] describe_notebook_instance_lifecycle_config
- [ ] describe_processing_job
- [ ] describe_subscribed_workteam
- [ ] describe_training_job
- [X] describe_training_job
- [ ] describe_transform_job
- [ ] describe_trial
- [ ] describe_trial_component

@@ -7287,7 +7285,7 @@
- [ ] list_labeling_jobs
- [ ] list_labeling_jobs_for_workteam
- [ ] list_model_packages
- [ ] list_models
- [X] list_models
- [ ] list_monitoring_executions
- [ ] list_monitoring_schedules
- [ ] list_notebook_instance_lifecycle_configs

@@ -7305,13 +7303,13 @@
- [ ] render_ui_template
- [ ] search
- [ ] start_monitoring_schedule
- [ ] start_notebook_instance
- [X] start_notebook_instance
- [ ] stop_auto_ml_job
- [ ] stop_compilation_job
- [ ] stop_hyper_parameter_tuning_job
- [ ] stop_labeling_job
- [ ] stop_monitoring_schedule
- [ ] stop_notebook_instance
- [X] stop_notebook_instance
- [ ] stop_processing_job
- [ ] stop_training_job
- [ ] stop_transform_job

@@ -7645,7 +7643,7 @@

## ses
<details>
<summary>21% implemented</summary>
<summary>23% implemented</summary>

- [ ] clone_receipt_rule_set
- [X] create_configuration_set

@@ -7653,8 +7651,8 @@
- [ ] create_configuration_set_tracking_options
- [ ] create_custom_verification_email_template
- [ ] create_receipt_filter
- [ ] create_receipt_rule
- [ ] create_receipt_rule_set
- [X] create_receipt_rule
- [X] create_receipt_rule_set
- [ ] create_template
- [ ] delete_configuration_set
- [ ] delete_configuration_set_event_destination

@@ -7959,7 +7957,7 @@

## ssm
<details>
<summary>12% implemented</summary>
<summary>18% implemented</summary>

- [X] add_tags_to_resource
- [ ] cancel_command

@@ -7967,14 +7965,14 @@
- [ ] create_activation
- [ ] create_association
- [ ] create_association_batch
- [ ] create_document
- [X] create_document
- [ ] create_maintenance_window
- [ ] create_ops_item
- [ ] create_patch_baseline
- [ ] create_resource_data_sync
- [ ] delete_activation
- [ ] delete_association
- [ ] delete_document
- [X] delete_document
- [ ] delete_inventory
- [ ] delete_maintenance_window
- [X] delete_parameter

@@ -7992,7 +7990,7 @@
- [ ] describe_automation_executions
- [ ] describe_automation_step_executions
- [ ] describe_available_patches
- [ ] describe_document
- [X] describe_document
- [ ] describe_document_permission
- [ ] describe_effective_instance_associations
- [ ] describe_effective_patches_for_patch_baseline

@@ -8023,7 +8021,7 @@
- [ ] get_connection_status
- [ ] get_default_patch_baseline
- [ ] get_deployable_patch_snapshot_for_instance
- [ ] get_document
- [X] get_document
- [ ] get_inventory
- [ ] get_inventory_schema
- [ ] get_maintenance_window

@@ -8048,7 +8046,7 @@
- [ ] list_compliance_items
- [ ] list_compliance_summaries
- [ ] list_document_versions
- [ ] list_documents
- [X] list_documents
- [ ] list_inventory_entries
- [ ] list_resource_compliance_summaries
- [ ] list_resource_data_sync

@@ -8073,8 +8071,8 @@
- [ ] terminate_session
- [ ] update_association
- [ ] update_association_status
- [ ] update_document
- [ ] update_document_default_version
- [X] update_document
- [X] update_document_default_version
- [ ] update_maintenance_window
- [ ] update_maintenance_window_target
- [ ] update_maintenance_window_task

@@ -8706,7 +8704,6 @@
- [ ] delete_group
- [ ] delete_mailbox_permissions
- [ ] delete_resource
- [ ] delete_retention_policy
- [ ] delete_user
- [ ] deregister_from_work_mail
- [ ] describe_group

@@ -8716,7 +8713,6 @@
- [ ] disassociate_delegate_from_resource
- [ ] disassociate_member_from_group
- [ ] get_access_control_effect
- [ ] get_default_retention_policy
- [ ] get_mailbox_details
- [ ] list_access_control_rules
- [ ] list_aliases

@@ -8730,7 +8726,6 @@
- [ ] list_users
- [ ] put_access_control_rule
- [ ] put_mailbox_permissions
- [ ] put_retention_policy
- [ ] register_to_work_mail
- [ ] reset_password
- [ ] tag_resource

Makefile
@@ -3,7 +3,11 @@ SHELL := /bin/bash
ifeq ($(TEST_SERVER_MODE), true)
    # exclude test_iot and test_iotdata for now
    # because authentication of iot is very complicated
    TEST_EXCLUDE := --exclude='test_iot.*'

    # exclude test_kinesisvideoarchivedmedia
    # because testing with moto_server is difficult with data-endpoint

    TEST_EXCLUDE := --exclude='test_iot.*' --exclude="test_kinesisvideoarchivedmedia.*"
else
    TEST_EXCLUDE :=
endif

@@ -113,6 +113,10 @@ mock_swf_deprecated = lazy_load(".swf", "mock_swf_deprecated")
XRaySegment = lazy_load(".xray", "XRaySegment")
mock_xray = lazy_load(".xray", "mock_xray")
mock_xray_client = lazy_load(".xray", "mock_xray_client")
mock_kinesisvideo = lazy_load(".kinesisvideo", "mock_kinesisvideo")
mock_kinesisvideoarchivedmedia = lazy_load(
    ".kinesisvideoarchivedmedia", "mock_kinesisvideoarchivedmedia"
)

# import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)

@@ -449,7 +449,7 @@ class APIGatewayResponse(BaseResponse):
        except ApiKeyAlreadyExists as error:
            return (
                error.code,
                self.headers,
                {},
                '{{"message":"{0}","code":"{1}"}}'.format(
                    error.message, error.error_type
                ),

@@ -702,10 +702,13 @@ class EventSourceMapping(CloudFormationModel):
        )

        for esm in esms:
            if esm.logical_resource_id in resource_name:
                lambda_backend.delete_event_source_mapping
            if esm.uuid == resource_name:
                esm.delete(region_name)

    @property
    def physical_resource_id(self):
        return self.uuid


class LambdaVersion(CloudFormationModel):
    def __init__(self, spec):

@@ -69,6 +69,11 @@ BACKENDS = {
    "sts": ("sts", "sts_backends"),
    "swf": ("swf", "swf_backends"),
    "xray": ("xray", "xray_backends"),
    "kinesisvideo": ("kinesisvideo", "kinesisvideo_backends"),
    "kinesis-video-archived-media": (
        "kinesisvideoarchivedmedia",
        "kinesisvideoarchivedmedia_backends",
    ),
}

@@ -246,12 +246,14 @@ def generate_resource_name(resource_type, stack_name, logical_id):
        return "{0}{1}".format(
            stack_name[:max_stack_name_portion_len], right_hand_part_of_name
        ).lower()
    elif resource_type == "AWS::IAM::Policy":
        return "{0}-{1}-{2}".format(stack_name[:5], logical_id[:4], random_suffix())
    else:
        return "{0}-{1}-{2}".format(stack_name, logical_id, random_suffix())


def parse_resource(
    logical_id, resource_json, resources_map, add_name_to_resource_json=True
    resource_json, resources_map,
):
    resource_type = resource_json["Type"]
    resource_class = resource_class_from_type(resource_type)

@@ -263,21 +265,37 @@
        )
        return None

    if "Properties" not in resource_json:
        resource_json["Properties"] = {}

    resource_json = clean_json(resource_json, resources_map)
    resource_name = generate_resource_name(

    return resource_class, resource_json, resource_type


def parse_resource_and_generate_name(
    logical_id, resource_json, resources_map,
):
    resource_tuple = parse_resource(resource_json, resources_map)
    if not resource_tuple:
        return None
    resource_class, resource_json, resource_type = resource_tuple

    generated_resource_name = generate_resource_name(
        resource_type, resources_map.get("AWS::StackName"), logical_id
    )

    resource_name_property = resource_name_property_from_type(resource_type)
    if resource_name_property:
        if "Properties" not in resource_json:
            resource_json["Properties"] = dict()
        if (
            add_name_to_resource_json
            and resource_name_property not in resource_json["Properties"]
            "Properties" in resource_json
            and resource_name_property in resource_json["Properties"]
        ):
            resource_json["Properties"][resource_name_property] = resource_name
        if resource_name_property in resource_json["Properties"]:
            resource_name = resource_json["Properties"][resource_name_property]
        else:
            resource_name = generated_resource_name
    else:
        resource_name = generated_resource_name

    return resource_class, resource_json, resource_name

@@ -289,12 +307,14 @@ def parse_and_create_resource(logical_id, resource_json, resources_map, region_n
        return None

    resource_type = resource_json["Type"]
    resource_tuple = parse_resource(logical_id, resource_json, resources_map)
    resource_tuple = parse_resource_and_generate_name(
        logical_id, resource_json, resources_map
    )
    if not resource_tuple:
        return None
    resource_class, resource_json, resource_name = resource_tuple
    resource_class, resource_json, resource_physical_name = resource_tuple
    resource = resource_class.create_from_cloudformation_json(
        resource_name, resource_json, region_name
        resource_physical_name, resource_json, region_name
    )
    resource.type = resource_type
    resource.logical_resource_id = logical_id

@@ -302,28 +322,34 @@ def parse_and_create_resource(logical_id, resource_json, resources_map, region_n


def parse_and_update_resource(logical_id, resource_json, resources_map, region_name):
    resource_class, new_resource_json, new_resource_name = parse_resource(
        logical_id, resource_json, resources_map, False
    )
    original_resource = resources_map[logical_id]
    new_resource = resource_class.update_from_cloudformation_json(
        original_resource=original_resource,
        new_resource_name=new_resource_name,
        cloudformation_json=new_resource_json,
        region_name=region_name,
    )
    new_resource.type = resource_json["Type"]
    new_resource.logical_resource_id = logical_id
    return new_resource


def parse_and_delete_resource(logical_id, resource_json, resources_map, region_name):
    resource_class, resource_json, resource_name = parse_resource(
    resource_class, resource_json, new_resource_name = parse_resource_and_generate_name(
        logical_id, resource_json, resources_map
    )
    resource_class.delete_from_cloudformation_json(
        resource_name, resource_json, region_name
    )
    original_resource = resources_map[logical_id]
    if not hasattr(
        resource_class.update_from_cloudformation_json, "__isabstractmethod__"
    ):
        new_resource = resource_class.update_from_cloudformation_json(
            original_resource=original_resource,
            new_resource_name=new_resource_name,
            cloudformation_json=resource_json,
            region_name=region_name,
        )
        new_resource.type = resource_json["Type"]
        new_resource.logical_resource_id = logical_id
        return new_resource
    else:
        return None


def parse_and_delete_resource(resource_name, resource_json, resources_map, region_name):
    resource_class, resource_json, _ = parse_resource(resource_json, resources_map)
    if not hasattr(
        resource_class.delete_from_cloudformation_json, "__isabstractmethod__"
    ):
        resource_class.delete_from_cloudformation_json(
            resource_name, resource_json, region_name
        )


def parse_condition(condition, resources_map, condition_map):

@@ -614,28 +640,36 @@ class ResourceMap(collections_abc.Mapping):
                )
                self._parsed_resources[resource_name] = new_resource

        for resource_name, resource in resources_by_action["Remove"].items():
            resource_json = old_template[resource_name]
        for logical_name, _ in resources_by_action["Remove"].items():
            resource_json = old_template[logical_name]
            resource = self._parsed_resources[logical_name]
            # ToDo: Standardize this.
            if hasattr(resource, "physical_resource_id"):
                resource_name = self._parsed_resources[
                    logical_name
                ].physical_resource_id
            else:
                resource_name = None
            parse_and_delete_resource(
                resource_name, resource_json, self, self._region_name
            )
            self._parsed_resources.pop(resource_name)
            self._parsed_resources.pop(logical_name)

        tries = 1
        while resources_by_action["Modify"] and tries < 5:
            for resource_name, resource in resources_by_action["Modify"].copy().items():
                resource_json = new_template[resource_name]
            for logical_name, _ in resources_by_action["Modify"].copy().items():
                resource_json = new_template[logical_name]
                try:
                    changed_resource = parse_and_update_resource(
                        resource_name, resource_json, self, self._region_name
                        logical_name, resource_json, self, self._region_name
                    )
                except Exception as e:
                    # skip over dependency violations, and try again in a
                    # second pass
                    last_exception = e
                else:
                    self._parsed_resources[resource_name] = changed_resource
                    del resources_by_action["Modify"][resource_name]
                    self._parsed_resources[logical_name] = changed_resource
                    del resources_by_action["Modify"][logical_name]
            tries += 1
        if tries == 5:
            raise last_exception

@@ -650,22 +684,20 @@ class ResourceMap(collections_abc.Mapping):
                if parsed_resource and hasattr(parsed_resource, "delete"):
                    parsed_resource.delete(self._region_name)
                else:
                    resource_name_attribute = (
                        parsed_resource.cloudformation_name_type()
                        if hasattr(parsed_resource, "cloudformation_name_type")
                        else resource_name_property_from_type(parsed_resource.type)
                    if hasattr(parsed_resource, "physical_resource_id"):
                        resource_name = parsed_resource.physical_resource_id
                    else:
                        resource_name = None

                    resource_json = self._resource_json_map[
                        parsed_resource.logical_resource_id
                    ]

                    parse_and_delete_resource(
                        resource_name, resource_json, self, self._region_name,
                    )
                    if resource_name_attribute:
                        resource_json = self._resource_json_map[
                            parsed_resource.logical_resource_id
                        ]
                        resource_name = resource_json["Properties"][
                            resource_name_attribute
                        ]
                        parse_and_delete_resource(
                            resource_name, resource_json, self, self._region_name
                        )
                        self._parsed_resources.pop(parsed_resource.logical_resource_id)

                    self._parsed_resources.pop(parsed_resource.logical_resource_id)
            except Exception as e:
                # skip over dependency violations, and try again in a
                # second pass

@@ -511,10 +511,9 @@ class LogGroup(CloudFormationModel):
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
        log_group_name = properties["LogGroupName"]
        tags = properties.get("Tags", {})
        return logs_backends[region_name].create_log_group(
            log_group_name, tags, **properties
            resource_name, tags, **properties
        )

@@ -45,6 +45,14 @@ class NotAuthorizedError(BadRequest):
        )


class UserNotConfirmedException(BadRequest):
    def __init__(self, message):
        super(UserNotConfirmedException, self).__init__()
        self.description = json.dumps(
            {"message": message, "__type": "UserNotConfirmedException"}
        )


class InvalidParameterException(JsonRESTError):
    def __init__(self, msg=None):
        self.code = 400

@@ -21,13 +21,15 @@ from .exceptions import (
    ResourceNotFoundError,
    UserNotFoundError,
    UsernameExistsException,
    UserNotConfirmedException,
    InvalidParameterException,
)
from .utils import create_id
from .utils import create_id, check_secret_hash

UserStatus = {
    "FORCE_CHANGE_PASSWORD": "FORCE_CHANGE_PASSWORD",
    "CONFIRMED": "CONFIRMED",
    "UNCONFIRMED": "UNCONFIRMED",
}

@@ -300,6 +302,9 @@ class CognitoIdpUser(BaseModel):
        self.attributes = attributes
        self.create_date = datetime.datetime.utcnow()
        self.last_modified_date = datetime.datetime.utcnow()
        self.sms_mfa_enabled = False
        self.software_token_mfa_enabled = False
        self.token_verified = False

        # Groups this user is a member of.
        # Note that these links are bidirectional.

@@ -316,6 +321,11 @@ class CognitoIdpUser(BaseModel):

    # list_users brings back "Attributes" while admin_get_user brings back "UserAttributes".
    def to_json(self, extended=False, attributes_key="Attributes"):
        user_mfa_setting_list = []
        if self.software_token_mfa_enabled:
            user_mfa_setting_list.append("SOFTWARE_TOKEN_MFA")
        elif self.sms_mfa_enabled:
            user_mfa_setting_list.append("SMS_MFA")
        user_json = self._base_json()
        if extended:
            user_json.update(

@@ -323,6 +333,7 @@ class CognitoIdpUser(BaseModel):
                    "Enabled": self.enabled,
                    attributes_key: self.attributes,
                    "MFAOptions": [],
                    "UserMFASettingList": user_mfa_setting_list,
                }
            )

@@ -731,6 +742,9 @@ class CognitoIdpBackend(BaseBackend):
    def respond_to_auth_challenge(
        self, session, client_id, challenge_name, challenge_responses
    ):
        if challenge_name == "PASSWORD_VERIFIER":
            session = challenge_responses.get("PASSWORD_CLAIM_SECRET_BLOCK")

        user_pool = self.sessions.get(session)
        if not user_pool:
            raise ResourceNotFoundError(session)

@@ -751,6 +765,62 @@ class CognitoIdpBackend(BaseBackend):
            del self.sessions[session]

            return self._log_user_in(user_pool, client, username)
        elif challenge_name == "PASSWORD_VERIFIER":
            username = challenge_responses.get("USERNAME")
            user = user_pool.users.get(username)
            if not user:
                raise UserNotFoundError(username)

            password_claim_signature = challenge_responses.get(
                "PASSWORD_CLAIM_SIGNATURE"
            )
            if not password_claim_signature:
                raise ResourceNotFoundError(password_claim_signature)
            password_claim_secret_block = challenge_responses.get(
                "PASSWORD_CLAIM_SECRET_BLOCK"
            )
            if not password_claim_secret_block:
                raise ResourceNotFoundError(password_claim_secret_block)
            timestamp = challenge_responses.get("TIMESTAMP")
            if not timestamp:
                raise ResourceNotFoundError(timestamp)

            if user.software_token_mfa_enabled:
                return {
                    "ChallengeName": "SOFTWARE_TOKEN_MFA",
                    "Session": session,
                    "ChallengeParameters": {},
                }

            if user.sms_mfa_enabled:
                return {
                    "ChallengeName": "SMS_MFA",
                    "Session": session,
                    "ChallengeParameters": {},
                }

            del self.sessions[session]
            return self._log_user_in(user_pool, client, username)
        elif challenge_name == "SOFTWARE_TOKEN_MFA":
            username = challenge_responses.get("USERNAME")
            user = user_pool.users.get(username)
            if not user:
                raise UserNotFoundError(username)

            software_token_mfa_code = challenge_responses.get("SOFTWARE_TOKEN_MFA_CODE")
            if not software_token_mfa_code:
                raise ResourceNotFoundError(software_token_mfa_code)

            if client.generate_secret:
                secret_hash = challenge_responses.get("SECRET_HASH")
                if not check_secret_hash(
                    client.secret, client.id, username, secret_hash
                ):
                    raise NotAuthorizedError(secret_hash)

            del self.sessions[session]
            return self._log_user_in(user_pool, client, username)

        else:
            return {}

@@ -806,6 +876,165 @@ class CognitoIdpBackend(BaseBackend):
        user_pool.resource_servers[identifier] = resource_server
        return resource_server

    def sign_up(self, client_id, username, password, attributes):
        user_pool = None
        for p in self.user_pools.values():
            if client_id in p.clients:
                user_pool = p
        if user_pool is None:
            raise ResourceNotFoundError(client_id)

        user = CognitoIdpUser(
            user_pool_id=user_pool.id,
            username=username,
            password=password,
            attributes=attributes,
            status=UserStatus["UNCONFIRMED"],
        )
        user_pool.users[user.username] = user
        return user

    def confirm_sign_up(self, client_id, username, confirmation_code):
        user_pool = None
        for p in self.user_pools.values():
            if client_id in p.clients:
                user_pool = p
        if user_pool is None:
            raise ResourceNotFoundError(client_id)

        if username not in user_pool.users:
            raise UserNotFoundError(username)

        user = user_pool.users[username]
        user.status = UserStatus["CONFIRMED"]
        return ""

    def initiate_auth(self, client_id, auth_flow, auth_parameters):
        user_pool = None
        for p in self.user_pools.values():
            if client_id in p.clients:
                user_pool = p
        if user_pool is None:
            raise ResourceNotFoundError(client_id)

        client = p.clients.get(client_id)

        if auth_flow == "USER_SRP_AUTH":
            username = auth_parameters.get("USERNAME")
            srp_a = auth_parameters.get("SRP_A")
            if not srp_a:
                raise ResourceNotFoundError(srp_a)
            if client.generate_secret:
                secret_hash = auth_parameters.get("SECRET_HASH")
                if not check_secret_hash(
                    client.secret, client.id, username, secret_hash
                ):
                    raise NotAuthorizedError(secret_hash)

            user = user_pool.users.get(username)
            if not user:
                raise UserNotFoundError(username)

            if user.status == UserStatus["UNCONFIRMED"]:
                raise UserNotConfirmedException("User is not confirmed.")

            session = str(uuid.uuid4())
            self.sessions[session] = user_pool

            return {
                "ChallengeName": "PASSWORD_VERIFIER",
                "Session": session,
                "ChallengeParameters": {
                    "SALT": str(uuid.uuid4()),
                    "SRP_B": str(uuid.uuid4()),
                    "USERNAME": user.id,
                    "USER_ID_FOR_SRP": user.id,
                    "SECRET_BLOCK": session,
                },
            }
        elif auth_flow == "REFRESH_TOKEN":
            refresh_token = auth_parameters.get("REFRESH_TOKEN")
            if not refresh_token:
                raise ResourceNotFoundError(refresh_token)

            client_id, username = user_pool.refresh_tokens[refresh_token]
            if not username:
                raise ResourceNotFoundError(username)

            if client.generate_secret:
                secret_hash = auth_parameters.get("SECRET_HASH")
                if not check_secret_hash(
                    client.secret, client.id, username, secret_hash
                ):
                    raise NotAuthorizedError(secret_hash)

            (
                id_token,
                access_token,
                expires_in,
            ) = user_pool.create_tokens_from_refresh_token(refresh_token)

            return {
                "AuthenticationResult": {
                    "IdToken": id_token,
                    "AccessToken": access_token,
                    "ExpiresIn": expires_in,
                }
            }
        else:
            return None

    def associate_software_token(self, access_token):
        for user_pool in self.user_pools.values():
            if access_token in user_pool.access_tokens:
                _, username = user_pool.access_tokens[access_token]
                user = user_pool.users.get(username)
                if not user:
                    raise UserNotFoundError(username)

                return {"SecretCode": str(uuid.uuid4())}
        else:
            raise NotAuthorizedError(access_token)

    def verify_software_token(self, access_token, user_code):
        for user_pool in self.user_pools.values():
            if access_token in user_pool.access_tokens:
                _, username = user_pool.access_tokens[access_token]
                user = user_pool.users.get(username)
                if not user:
                    raise UserNotFoundError(username)

                user.token_verified = True

                return {"Status": "SUCCESS"}
        else:
            raise NotAuthorizedError(access_token)

    def set_user_mfa_preference(
        self, access_token, software_token_mfa_settings, sms_mfa_settings
    ):
        for user_pool in self.user_pools.values():
            if access_token in user_pool.access_tokens:
                _, username = user_pool.access_tokens[access_token]
                user = user_pool.users.get(username)
                if not user:
                    raise UserNotFoundError(username)

                if software_token_mfa_settings["Enabled"]:
                    if user.token_verified:
                        user.software_token_mfa_enabled = True
                    else:
                        raise InvalidParameterException(
                            "User has not verified software token mfa"
                        )

                elif sms_mfa_settings["Enabled"]:
                    user.sms_mfa_enabled = True

                return None
        else:
            raise NotAuthorizedError(access_token)


cognitoidp_backends = {}
for region in Session().get_available_regions("cognito-idp"):

@@ -4,7 +4,7 @@ import json
import os

from moto.core.responses import BaseResponse
from .models import cognitoidp_backends, find_region_by_value
from .models import cognitoidp_backends, find_region_by_value, UserStatus


class CognitoIdpResponse(BaseResponse):

@@ -390,6 +390,65 @@ class CognitoIdpResponse(BaseResponse):
        )
        return json.dumps({"ResourceServer": resource_server.to_json()})

    def sign_up(self):
        client_id = self._get_param("ClientId")
        username = self._get_param("Username")
        password = self._get_param("Password")
        user = cognitoidp_backends[self.region].sign_up(
            client_id=client_id,
            username=username,
            password=password,
            attributes=self._get_param("UserAttributes", []),
        )
        return json.dumps(
            {
                "UserConfirmed": user.status == UserStatus["CONFIRMED"],
                "UserSub": user.id,
            }
        )

    def confirm_sign_up(self):
        client_id = self._get_param("ClientId")
        username = self._get_param("Username")
        confirmation_code = self._get_param("ConfirmationCode")
        cognitoidp_backends[self.region].confirm_sign_up(
            client_id=client_id, username=username, confirmation_code=confirmation_code,
        )
        return ""

    def initiate_auth(self):
        client_id = self._get_param("ClientId")
        auth_flow = self._get_param("AuthFlow")
        auth_parameters = self._get_param("AuthParameters")

        auth_result = cognitoidp_backends[self.region].initiate_auth(
            client_id, auth_flow, auth_parameters
        )

        return json.dumps(auth_result)

    def associate_software_token(self):
        access_token = self._get_param("AccessToken")
        result = cognitoidp_backends[self.region].associate_software_token(access_token)
        return json.dumps(result)

    def verify_software_token(self):
        access_token = self._get_param("AccessToken")
        user_code = self._get_param("UserCode")
        result = cognitoidp_backends[self.region].verify_software_token(
            access_token, user_code
        )
        return json.dumps(result)

    def set_user_mfa_preference(self):
        access_token = self._get_param("AccessToken")
        software_token_mfa_settings = self._get_param("SoftwareTokenMfaSettings")
        sms_mfa_settings = self._get_param("SMSMfaSettings")
        cognitoidp_backends[self.region].set_user_mfa_preference(
            access_token, software_token_mfa_settings, sms_mfa_settings
        )
        return ""


class CognitoIdpJsonWebKeyResponse(BaseResponse):
    def __init__(self):

@@ -2,9 +2,20 @@ from __future__ import unicode_literals
import six
import random
import string
import hashlib
import hmac
import base64


def create_id():
    size = 26
    chars = list(range(10)) + list(string.ascii_lowercase)
    return "".join(six.text_type(random.choice(chars)) for x in range(size))


def check_secret_hash(app_client_secret, app_client_id, username, secret_hash):
    key = bytes(str(app_client_secret).encode("latin-1"))
    msg = bytes(str(username + app_client_id).encode("latin-1"))
    new_digest = hmac.new(key, msg, hashlib.sha256).digest()
    SECRET_HASH = base64.b64encode(new_digest).decode()
    return SECRET_HASH == secret_hash
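
For reference, a client talking to this mock would compute the same value that `check_secret_hash` verifies: an HMAC-SHA256 of `username + client_id`, keyed with the app client secret and base64-encoded. A minimal sketch (the helper name is illustrative; utf-8 is used here, equivalent to the latin-1 above for ASCII inputs):

```python
import base64
import hashlib
import hmac

def compute_secret_hash(client_secret, client_id, username):
    # Same construction check_secret_hash verifies:
    # base64(HMAC-SHA256(key=client_secret, msg=username + client_id))
    digest = hmac.new(
        client_secret.encode("utf-8"),
        (username + client_id).encode("utf-8"),
        hashlib.sha256,
    ).digest()
    return base64.b64encode(digest).decode()
```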
@@ -90,9 +90,9 @@ class Pipeline(CloudFormationModel):
        datapipeline_backend = datapipeline_backends[region_name]
        properties = cloudformation_json["Properties"]

        cloudformation_unique_id = "cf-" + properties["Name"]
        cloudformation_unique_id = "cf-" + resource_name
        pipeline = datapipeline_backend.create_pipeline(
            properties["Name"], cloudformation_unique_id
            resource_name, cloudformation_unique_id
        )
        datapipeline_backend.put_pipeline_definition(
            pipeline.pipeline_id, properties["PipelineObjects"]

@@ -461,7 +461,7 @@ class Table(CloudFormationModel):
            params["streams"] = properties["StreamSpecification"]

        table = dynamodb_backends[region_name].create_table(
            name=properties["TableName"], **params
            name=resource_name, **params
        )
        return table

@@ -469,11 +469,7 @@ class Table(CloudFormationModel):
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]

        table = dynamodb_backends[region_name].delete_table(
            name=properties["TableName"]
        )
        table = dynamodb_backends[region_name].delete_table(name=resource_name)
        return table

    def _generate_arn(self, name):

@@ -3402,7 +3402,14 @@ class SubnetBackend(object):
            return subnets[subnet_id]
        raise InvalidSubnetIdError(subnet_id)

    def create_subnet(self, vpc_id, cidr_block, availability_zone, context=None):
    def create_subnet(
        self,
        vpc_id,
        cidr_block,
        availability_zone=None,
        availability_zone_id=None,
        context=None,
    ):
        subnet_id = random_subnet_id()
        vpc = self.get_vpc(
            vpc_id

@@ -3430,15 +3437,25 @@ class SubnetBackend(object):
        # consider it the default
        default_for_az = str(availability_zone not in self.subnets).lower()
        map_public_ip_on_launch = default_for_az
        if availability_zone is None:

        if availability_zone is None and not availability_zone_id:
            availability_zone = "us-east-1a"
        try:
            availability_zone_data = next(
                zone
                for zones in RegionsAndZonesBackend.zones.values()
                for zone in zones
                if zone.name == availability_zone
            )
            if availability_zone:
                availability_zone_data = next(
                    zone
                    for zones in RegionsAndZonesBackend.zones.values()
                    for zone in zones
                    if zone.name == availability_zone
                )
            elif availability_zone_id:
                availability_zone_data = next(
                    zone
                    for zones in RegionsAndZonesBackend.zones.values()
                    for zone in zones
                    if zone.zone_id == availability_zone_id
                )

        except StopIteration:
            raise InvalidAvailabilityZoneError(
                availability_zone,

@@ -9,12 +9,14 @@ class Subnets(BaseResponse):
    def create_subnet(self):
        vpc_id = self._get_param("VpcId")
        cidr_block = self._get_param("CidrBlock")
        availability_zone = self._get_param(
            "AvailabilityZone",
            if_none=random.choice(self.ec2_backend.describe_availability_zones()).name,
        )
        availability_zone = self._get_param("AvailabilityZone")
        availability_zone_id = self._get_param("AvailabilityZoneId")
        if not availability_zone and not availability_zone_id:
            availability_zone = random.choice(
                self.ec2_backend.describe_availability_zones()
            ).name
        subnet = self.ec2_backend.create_subnet(
            vpc_id, cidr_block, availability_zone, context=self
            vpc_id, cidr_block, availability_zone, availability_zone_id, context=self
        )
        template = self.response_template(CREATE_SUBNET_RESPONSE)
        return template.render(subnet=subnet)

@@ -80,15 +80,11 @@ class Repository(BaseObject, CloudFormationModel):
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]

        ecr_backend = ecr_backends[region_name]
        return ecr_backend.create_repository(
            # RepositoryName is optional in CloudFormation, thus create a random
            # name if necessary
            repository_name=properties.get(
                "RepositoryName", "ecrrepository{0}".format(int(random() * 10 ** 6))
            )
            repository_name=resource_name
        )

    @classmethod

@@ -82,36 +82,24 @@ class Cluster(BaseObject, CloudFormationModel):
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        # if properties is not provided, cloudformation will use the default values for all properties
        if "Properties" in cloudformation_json:
            properties = cloudformation_json["Properties"]
        else:
            properties = {}

        ecs_backend = ecs_backends[region_name]
        return ecs_backend.create_cluster(
            # ClusterName is optional in CloudFormation, thus create a random
            # name if necessary
            cluster_name=properties.get(
                "ClusterName", "ecscluster{0}".format(int(random() * 10 ** 6))
            )
            cluster_name=resource_name
        )

    @classmethod
    def update_from_cloudformation_json(
        cls, original_resource, new_resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]

        if original_resource.name != properties["ClusterName"]:
        if original_resource.name != new_resource_name:
            ecs_backend = ecs_backends[region_name]
            ecs_backend.delete_cluster(original_resource.arn)
            return ecs_backend.create_cluster(
                # ClusterName is optional in CloudFormation, thus create a
                # random name if necessary
                cluster_name=properties.get(
                    "ClusterName", "ecscluster{0}".format(int(random() * 10 ** 6))
                )
                cluster_name=new_resource_name
            )
        else:
            # no-op when nothing changed between old and new resources

@@ -355,14 +343,13 @@ class Service(BaseObject, CloudFormationModel):
            task_definition = properties["TaskDefinition"].family
        else:
            task_definition = properties["TaskDefinition"]
        service_name = "{0}Service{1}".format(cluster, int(random() * 10 ** 6))
        desired_count = properties["DesiredCount"]
        # TODO: LoadBalancers
        # TODO: Role

        ecs_backend = ecs_backends[region_name]
        return ecs_backend.create_service(
            cluster, service_name, desired_count, task_definition_str=task_definition
            cluster, resource_name, desired_count, task_definition_str=task_definition
        )

    @classmethod

@@ -386,12 +373,9 @@ class Service(BaseObject, CloudFormationModel):
        # TODO: LoadBalancers
        # TODO: Role
        ecs_backend.delete_service(cluster_name, service_name)
        new_service_name = "{0}Service{1}".format(
            cluster_name, int(random() * 10 ** 6)
        )
        return ecs_backend.create_service(
            cluster_name,
            new_service_name,
            new_resource_name,
            desired_count,
            task_definition_str=task_definition,
        )

@@ -87,7 +87,10 @@ class EC2ContainerServiceResponse(BaseResponse):
    def describe_task_definition(self):
        task_definition_str = self._get_param("taskDefinition")
        data = self.ecs_backend.describe_task_definition(task_definition_str)
        return json.dumps({"taskDefinition": data.response_object, "failures": []})
        resp = {"taskDefinition": data.response_object, "failures": []}
        if "TAGS" in self._get_param("include", []):
            resp["tags"] = self.ecs_backend.list_tags_for_resource(data.arn)
        return json.dumps(resp)

    def deregister_task_definition(self):
        task_definition_str = self._get_param("taskDefinition")

@@ -191,13 +194,16 @@ class EC2ContainerServiceResponse(BaseResponse):
        cluster_str = self._get_param("cluster")
        service_names = self._get_param("services")
        services = self.ecs_backend.describe_services(cluster_str, service_names)

        return json.dumps(
            {
                "services": [service.response_object for service in services],
                "failures": [],
            }
        )
        resp = {
            "services": [service.response_object for service in services],
            "failures": [],
        }
        if "TAGS" in self._get_param("include", []):
            for i, service in enumerate(services):
                resp["services"][i]["tags"] = self.ecs_backend.list_tags_for_resource(
                    service.arn
                )
        return json.dumps(resp)

    def update_service(self):
        cluster_str = self._get_param("cluster")

@@ -160,7 +160,6 @@ class FakeTargetGroup(CloudFormationModel):

        elbv2_backend = elbv2_backends[region_name]

        name = properties.get("Name")
        vpc_id = properties.get("VpcId")
        protocol = properties.get("Protocol")
        port = properties.get("Port")

@@ -175,7 +174,7 @@ class FakeTargetGroup(CloudFormationModel):
        target_type = properties.get("TargetType")

        target_group = elbv2_backend.create_target_group(
            name=name,
            name=resource_name,
            vpc_id=vpc_id,
            protocol=protocol,
            port=port,

@@ -437,13 +436,12 @@ class FakeLoadBalancer(CloudFormationModel):

        elbv2_backend = elbv2_backends[region_name]

        name = properties.get("Name", resource_name)
        security_groups = properties.get("SecurityGroups")
        subnet_ids = properties.get("Subnets")
        scheme = properties.get("Scheme", "internet-facing")

        load_balancer = elbv2_backend.create_load_balancer(
            name, security_groups, subnet_ids, scheme=scheme
            resource_name, security_groups, subnet_ids, scheme=scheme
        )
        return load_balancer

@@ -88,7 +88,7 @@ class Rule(CloudFormationModel):
    ):
        properties = cloudformation_json["Properties"]
        event_backend = events_backends[region_name]
-        event_name = properties.get("Name") or resource_name
+        event_name = resource_name
        return event_backend.put_rule(name=event_name, **properties)

    @classmethod
@@ -104,9 +104,8 @@ class Rule(CloudFormationModel):
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
        event_backend = events_backends[region_name]
-        event_name = properties.get("Name") or resource_name
+        event_name = resource_name
        event_backend.delete_rule(name=event_name)


@@ -176,7 +175,7 @@ class EventBus(CloudFormationModel):
    ):
        properties = cloudformation_json["Properties"]
        event_backend = events_backends[region_name]
-        event_name = properties["Name"]
+        event_name = resource_name
        event_source_name = properties.get("EventSourceName")
        return event_backend.create_event_bus(
            name=event_name, event_source_name=event_source_name
@@ -195,9 +194,8 @@ class EventBus(CloudFormationModel):
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
        event_backend = events_backends[region_name]
-        event_bus_name = properties["Name"]
+        event_bus_name = resource_name
        event_backend.delete_event_bus(event_bus_name)
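The pattern in these events hunks — naming the rule or bus after the CloudFormation-assigned physical resource name instead of re-reading `Properties["Name"]` — recurs throughout this commit. A short sketch of the observable effect, assuming a template with no explicit `Name` (resource names here are illustrative):

```python
import json

import boto3
from moto import mock_cloudformation, mock_events

template = {
    "Resources": {
        "MyRule": {
            "Type": "AWS::Events::Rule",
            "Properties": {"ScheduleExpression": "rate(5 minutes)"},
        }
    }
}


@mock_cloudformation
@mock_events
def test_rule_gets_physical_resource_name():
    cf = boto3.client("cloudformation", region_name="us-east-1")
    cf.create_stack(StackName="demo", TemplateBody=json.dumps(template))
    rules = boto3.client("events", region_name="us-east-1").list_rules()["Rules"]
    # The single rule is named after the generated physical resource id,
    # so deleting the stack can later find it again by that same name.
    assert len(rules) == 1
```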
@@ -12,7 +12,6 @@ import re
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from six.moves.urllib.parse import urlparse
-from uuid import uuid4

from moto.core.exceptions import RESTError
from moto.core import BaseBackend, BaseModel, ACCOUNT_ID, CloudFormationModel
@@ -84,7 +83,11 @@ class VirtualMfaDevice(object):
        return iso_8601_datetime_without_milliseconds(self.enable_date)


-class Policy(BaseModel):
+class Policy(CloudFormationModel):
+
+    # Note: This class does not implement the CloudFormation support for AWS::IAM::Policy, as that CF resource
+    # is for creating *inline* policies.  That is done in class InlinePolicy.

    is_attachable = False

    def __init__(
@@ -295,8 +298,149 @@ aws_managed_policies = [
]


-class InlinePolicy(Policy):
-    """TODO: is this needed?"""
+class InlinePolicy(CloudFormationModel):
+    # Represents an Inline Policy created by CloudFormation
+    def __init__(
+        self,
+        resource_name,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
+    ):
+        self.name = resource_name
+        self.policy_name = None
+        self.policy_document = None
+        self.group_names = None
+        self.role_names = None
+        self.user_names = None
+        self.update(policy_name, policy_document, group_names, role_names, user_names)
+
+    def update(
+        self, policy_name, policy_document, group_names, role_names, user_names,
+    ):
+        self.policy_name = policy_name
+        self.policy_document = (
+            json.dumps(policy_document)
+            if isinstance(policy_document, dict)
+            else policy_document
+        )
+        self.group_names = group_names
+        self.role_names = role_names
+        self.user_names = user_names
+
+    @staticmethod
+    def cloudformation_name_type():
+        return None  # Resource never gets named after by template PolicyName!
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::Policy"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        policy_document = properties.get("PolicyDocument")
+        policy_name = properties.get("PolicyName")
+        user_names = properties.get("Users")
+        role_names = properties.get("Roles")
+        group_names = properties.get("Groups")
+
+        return iam_backend.create_inline_policy(
+            resource_physical_name,
+            policy_name,
+            policy_document,
+            group_names,
+            role_names,
+            user_names,
+        )
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            resource_name_property = cls.cloudformation_name_type()
+            if resource_name_property not in properties:
+                properties[resource_name_property] = new_resource_name
+            new_resource = cls.create_from_cloudformation_json(
+                properties[resource_name_property], cloudformation_json, region_name
+            )
+            properties[resource_name_property] = original_resource.name
+            cls.delete_from_cloudformation_json(
+                original_resource.name, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            properties = cloudformation_json.get("Properties", {})
+            policy_document = properties.get("PolicyDocument")
+            policy_name = properties.get("PolicyName", original_resource.name)
+            user_names = properties.get("Users")
+            role_names = properties.get("Roles")
+            group_names = properties.get("Groups")
+
+            return iam_backend.update_inline_policy(
+                original_resource.name,
+                policy_name,
+                policy_document,
+                group_names,
+                role_names,
+                user_names,
+            )
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_inline_policy(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = []
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.name
+
+    def apply_policy(self, backend):
+        if self.user_names:
+            for user_name in self.user_names:
+                backend.put_user_policy(
+                    user_name, self.policy_name, self.policy_document
+                )
+        if self.role_names:
+            for role_name in self.role_names:
+                backend.put_role_policy(
+                    role_name, self.policy_name, self.policy_document
+                )
+        if self.group_names:
+            for group_name in self.group_names:
+                backend.put_group_policy(
+                    group_name, self.policy_name, self.policy_document
+                )
+
+    def unapply_policy(self, backend):
+        if self.user_names:
+            for user_name in self.user_names:
+                backend.delete_user_policy(user_name, self.policy_name)
+        if self.role_names:
+            for role_name in self.role_names:
+                backend.delete_role_policy(role_name, self.policy_name)
+        if self.group_names:
+            for group_name in self.group_names:
+                backend.delete_group_policy(group_name, self.policy_name)


class Role(CloudFormationModel):
@@ -338,11 +482,13 @@ class Role(CloudFormationModel):

    @classmethod
    def create_from_cloudformation_json(
-        cls, resource_name, cloudformation_json, region_name
+        cls, resource_physical_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
        role_name = (
-            properties["RoleName"] if "RoleName" in properties else str(uuid4())[0:5]
+            properties["RoleName"]
+            if "RoleName" in properties
+            else resource_physical_name
        )

        role = iam_backend.create_role(
@@ -416,13 +562,15 @@ class InstanceProfile(CloudFormationModel):

    @classmethod
    def create_from_cloudformation_json(
-        cls, resource_name, cloudformation_json, region_name
+        cls, resource_physical_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]

        role_ids = properties["Roles"]
        return iam_backend.create_instance_profile(
-            name=resource_name, path=properties.get("Path", "/"), role_ids=role_ids
+            name=resource_physical_name,
+            path=properties.get("Path", "/"),
+            role_ids=role_ids,
        )

    @property
@@ -475,12 +623,12 @@ class SigningCertificate(BaseModel):
        return iso_8601_datetime_without_milliseconds(self.upload_date)


-class AccessKey(BaseModel):
-    def __init__(self, user_name):
+class AccessKey(CloudFormationModel):
+    def __init__(self, user_name, status="Active"):
        self.user_name = user_name
        self.access_key_id = "AKIA" + random_access_key()
        self.secret_access_key = random_alphanumeric(40)
-        self.status = "Active"
+        self.status = status
        self.create_date = datetime.utcnow()
        self.last_used = None
@@ -499,6 +647,66 @@ class AccessKey(BaseModel):
            return self.secret_access_key
        raise UnformattedGetAttTemplateException()

+    @staticmethod
+    def cloudformation_name_type():
+        return None  # Resource never gets named after by template PolicyName!
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::AccessKey"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        user_name = properties.get("UserName")
+        status = properties.get("Status", "Active")
+
+        return iam_backend.create_access_key(user_name, status=status,)
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            new_resource = cls.create_from_cloudformation_json(
+                new_resource_name, cloudformation_json, region_name
+            )
+            cls.delete_from_cloudformation_json(
+                original_resource.physical_resource_id, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            properties = cloudformation_json.get("Properties", {})
+            status = properties.get("Status")
+            return iam_backend.update_access_key(
+                original_resource.user_name, original_resource.access_key_id, status
+            )
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_access_key_by_name(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = ["Serial", "UserName"]
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.access_key_id
+

class SshPublicKey(BaseModel):
    def __init__(self, user_name, ssh_public_key_body):
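With `AccessKey` now a `CloudFormationModel`, a template can mint keys and control their `Status`. A sketch of the round trip, assuming `Ref` on the user resolves to its physical name (as `User.physical_resource_id` below suggests); the resource names are illustrative:

```python
import json

import boto3
from moto import mock_cloudformation, mock_iam

template = {
    "Resources": {
        "TheUser": {"Type": "AWS::IAM::User"},
        "TheKey": {
            "Type": "AWS::IAM::AccessKey",
            "Properties": {"UserName": {"Ref": "TheUser"}, "Status": "Inactive"},
        },
    }
}


@mock_cloudformation
@mock_iam
def test_access_key_status_from_template():
    cf = boto3.client("cloudformation", region_name="us-east-1")
    cf.create_stack(StackName="keys", TemplateBody=json.dumps(template))
    iam = boto3.client("iam", region_name="us-east-1")
    user_name = iam.list_users()["Users"][0]["UserName"]
    keys = iam.list_access_keys(UserName=user_name)["AccessKeyMetadata"]
    # Status was threaded through create_access_key(status=...) above.
    assert keys[0]["Status"] == "Inactive"
```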
@@ -564,8 +772,14 @@ class Group(BaseModel):
    def list_policies(self):
        return self.policies.keys()

+    def delete_policy(self, policy_name):
+        if policy_name not in self.policies:
+            raise IAMNotFoundException("Policy {0} not found".format(policy_name))
+
+        del self.policies[policy_name]
+

-class User(BaseModel):
+class User(CloudFormationModel):
    def __init__(self, name, path=None, tags=None):
        self.name = name
        self.id = random_resource_id()
@@ -614,8 +828,8 @@ class User(BaseModel):

        del self.policies[policy_name]

-    def create_access_key(self):
-        access_key = AccessKey(self.name)
+    def create_access_key(self, status="Active"):
+        access_key = AccessKey(self.name, status)
        self.access_keys.append(access_key)
        return access_key
@@ -633,9 +847,11 @@ class User(BaseModel):
        key = self.get_access_key_by_id(access_key_id)
        self.access_keys.remove(key)

-    def update_access_key(self, access_key_id, status):
+    def update_access_key(self, access_key_id, status=None):
        key = self.get_access_key_by_id(access_key_id)
-        key.status = status
+        if status is not None:
+            key.status = status
+        return key

    def get_access_key_by_id(self, access_key_id):
        for key in self.access_keys:
@@ -646,6 +862,15 @@ class User(BaseModel):
            "The Access Key with id {0} cannot be found".format(access_key_id)
        )

+    def has_access_key(self, access_key_id):
+        return any(
+            [
+                access_key
+                for access_key in self.access_keys
+                if access_key.access_key_id == access_key_id
+            ]
+        )
+
    def upload_ssh_public_key(self, ssh_public_key_body):
        pubkey = SshPublicKey(self.name, ssh_public_key_body)
        self.ssh_public_keys.append(pubkey)
@@ -677,7 +902,7 @@ class User(BaseModel):
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException

        if attribute_name == "Arn":
-            raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "Arn" ]"')
+            return self.arn
        raise UnformattedGetAttTemplateException()

    def to_csv(self):
@@ -752,6 +977,66 @@ class User(BaseModel):
            access_key_2_last_used,
        )

+    @staticmethod
+    def cloudformation_name_type():
+        return "UserName"
+
+    @staticmethod
+    def cloudformation_type():
+        return "AWS::IAM::User"
+
+    @classmethod
+    def create_from_cloudformation_json(
+        cls, resource_physical_name, cloudformation_json, region_name
+    ):
+        properties = cloudformation_json.get("Properties", {})
+        path = properties.get("Path")
+        return iam_backend.create_user(resource_physical_name, path)
+
+    @classmethod
+    def update_from_cloudformation_json(
+        cls, original_resource, new_resource_name, cloudformation_json, region_name,
+    ):
+        properties = cloudformation_json["Properties"]
+
+        if cls.is_replacement_update(properties):
+            resource_name_property = cls.cloudformation_name_type()
+            if resource_name_property not in properties:
+                properties[resource_name_property] = new_resource_name
+            new_resource = cls.create_from_cloudformation_json(
+                properties[resource_name_property], cloudformation_json, region_name
+            )
+            properties[resource_name_property] = original_resource.name
+            cls.delete_from_cloudformation_json(
+                original_resource.name, cloudformation_json, region_name
+            )
+            return new_resource
+
+        else:  # No Interruption
+            if "Path" in properties:
+                original_resource.path = properties["Path"]
+            return original_resource
+
+    @classmethod
+    def delete_from_cloudformation_json(
+        cls, resource_name, cloudformation_json, region_name
+    ):
+        iam_backend.delete_user(resource_name)
+
+    @staticmethod
+    def is_replacement_update(properties):
+        properties_requiring_replacement_update = ["UserName"]
+        return any(
+            [
+                property_requiring_replacement in properties
+                for property_requiring_replacement in properties_requiring_replacement_update
+            ]
+        )
+
+    @property
+    def physical_resource_id(self):
+        return self.name
+

class AccountPasswordPolicy(BaseModel):
    def __init__(
@@ -984,6 +1269,8 @@ class IAMBackend(BaseBackend):
        self.virtual_mfa_devices = {}
        self.account_password_policy = None
        self.account_summary = AccountSummary(self)
+        self.inline_policies = {}
+        self.access_keys = {}
        super(IAMBackend, self).__init__()

    def _init_managed_policies(self):
@@ -1478,6 +1765,10 @@ class IAMBackend(BaseBackend):
        group = self.get_group(group_name)
        return group.list_policies()

+    def delete_group_policy(self, group_name, policy_name):
+        group = self.get_group(group_name)
+        group.delete_policy(policy_name)
+
    def get_group_policy(self, group_name, policy_name):
        group = self.get_group(group_name)
        return group.get_policy(policy_name)
@@ -1674,14 +1965,15 @@ class IAMBackend(BaseBackend):
    def delete_policy(self, policy_arn):
        del self.managed_policies[policy_arn]

-    def create_access_key(self, user_name=None):
+    def create_access_key(self, user_name=None, status="Active"):
        user = self.get_user(user_name)
-        key = user.create_access_key()
+        key = user.create_access_key(status)
+        self.access_keys[key.physical_resource_id] = key
        return key

-    def update_access_key(self, user_name, access_key_id, status):
+    def update_access_key(self, user_name, access_key_id, status=None):
        user = self.get_user(user_name)
-        user.update_access_key(access_key_id, status)
+        return user.update_access_key(access_key_id, status)

    def get_access_key_last_used(self, access_key_id):
        access_keys_list = self.get_all_access_keys_for_all_users()
@@ -1706,7 +1998,17 @@ class IAMBackend(BaseBackend):

    def delete_access_key(self, access_key_id, user_name):
        user = self.get_user(user_name)
-        user.delete_access_key(access_key_id)
+        access_key = user.get_access_key_by_id(access_key_id)
+        self.delete_access_key_by_name(access_key.access_key_id)
+
+    def delete_access_key_by_name(self, name):
+        key = self.access_keys[name]
+        try:  # User may have been deleted before their access key...
+            user = self.get_user(key.user_name)
+            user.delete_access_key(key.access_key_id)
+        except IAMNotFoundException:
+            pass
+        del self.access_keys[name]

    def upload_ssh_public_key(self, user_name, ssh_public_key_body):
        user = self.get_user(user_name)
@@ -2017,5 +2319,62 @@ class IAMBackend(BaseBackend):
    def get_account_summary(self):
        return self.account_summary

+    def create_inline_policy(
+        self,
+        resource_name,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
+    ):
+        if resource_name in self.inline_policies:
+            raise IAMConflictException(
+                "EntityAlreadyExists",
+                "Inline Policy {0} already exists".format(resource_name),
+            )
+
+        inline_policy = InlinePolicy(
+            resource_name,
+            policy_name,
+            policy_document,
+            group_names,
+            role_names,
+            user_names,
+        )
+        self.inline_policies[resource_name] = inline_policy
+        inline_policy.apply_policy(self)
+        return inline_policy
+
+    def get_inline_policy(self, policy_id):
+        inline_policy = None
+        try:
+            inline_policy = self.inline_policies[policy_id]
+        except KeyError:
+            raise IAMNotFoundException("Inline policy {0} not found".format(policy_id))
+        return inline_policy
+
+    def update_inline_policy(
+        self,
+        resource_name,
+        policy_name,
+        policy_document,
+        group_names,
+        role_names,
+        user_names,
+    ):
+        inline_policy = self.get_inline_policy(resource_name)
+        inline_policy.unapply_policy(self)
+        inline_policy.update(
+            policy_name, policy_document, group_names, role_names, user_names,
+        )
+        inline_policy.apply_policy(self)
+        return inline_policy
+
+    def delete_inline_policy(self, policy_id):
+        inline_policy = self.get_inline_policy(policy_id)
+        inline_policy.unapply_policy(self)
+        del self.inline_policies[policy_id]
+

iam_backend = IAMBackend()
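The `InlinePolicy` model plus these backend methods give `AWS::IAM::Policy` full create/update/delete support, fanning the document out with `put_user_policy` and friends. A sketch of the round trip (template values are illustrative; it assumes `Ref` on the user yields its generated name):

```python
import json

import boto3
from moto import mock_cloudformation, mock_iam

template = {
    "Resources": {
        "TheUser": {"Type": "AWS::IAM::User"},
        "ThePolicy": {
            "Type": "AWS::IAM::Policy",
            "Properties": {
                "PolicyName": "AllowS3Read",
                "PolicyDocument": {
                    "Version": "2012-10-17",
                    "Statement": [
                        {"Effect": "Allow", "Action": "s3:Get*", "Resource": "*"}
                    ],
                },
                "Users": [{"Ref": "TheUser"}],
            },
        },
    }
}


@mock_cloudformation
@mock_iam
def test_inline_policy_applied_to_user():
    cf = boto3.client("cloudformation", region_name="us-east-1")
    cf.create_stack(StackName="iam-demo", TemplateBody=json.dumps(template))
    iam = boto3.client("iam", region_name="us-east-1")
    user_name = iam.list_users()["Users"][0]["UserName"]
    # apply_policy() called put_user_policy, so the document shows up inline.
    policies = iam.list_user_policies(UserName=user_name)["PolicyNames"]
    assert policies == ["AllowS3Read"]
```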
@@ -20,6 +20,7 @@ from .exceptions import (
    InvalidStateTransitionException,
    VersionConflictException,
)
+from moto.utilities.utils import random_string


class FakeThing(BaseModel):
@@ -374,6 +375,55 @@ class FakeJobExecution(BaseModel):
        return obj


+class FakeEndpoint(BaseModel):
+    def __init__(self, endpoint_type, region_name):
+        if endpoint_type not in [
+            "iot:Data",
+            "iot:Data-ATS",
+            "iot:CredentialProvider",
+            "iot:Jobs",
+        ]:
+            raise InvalidRequestException(
+                " An error occurred (InvalidRequestException) when calling the DescribeEndpoint "
+                "operation: Endpoint type %s not recognized." % endpoint_type
+            )
+        self.region_name = region_name
+        data_identifier = random_string(14)
+        if endpoint_type == "iot:Data":
+            self.endpoint = "{i}.iot.{r}.amazonaws.com".format(
+                i=data_identifier, r=self.region_name
+            )
+        elif "iot:Data-ATS" in endpoint_type:
+            self.endpoint = "{i}-ats.iot.{r}.amazonaws.com".format(
+                i=data_identifier, r=self.region_name
+            )
+        elif "iot:CredentialProvider" in endpoint_type:
+            identifier = random_string(14)
+            self.endpoint = "{i}.credentials.iot.{r}.amazonaws.com".format(
+                i=identifier, r=self.region_name
+            )
+        elif "iot:Jobs" in endpoint_type:
+            identifier = random_string(14)
+            self.endpoint = "{i}.jobs.iot.{r}.amazonaws.com".format(
+                i=identifier, r=self.region_name
+            )
+        self.endpoint_type = endpoint_type
+
+    def to_get_dict(self):
+        obj = {
+            "endpointAddress": self.endpoint,
+        }
+
+        return obj
+
+    def to_dict(self):
+        obj = {
+            "endpointAddress": self.endpoint,
+        }
+
+        return obj
+
+
class IoTBackend(BaseBackend):
    def __init__(self, region_name=None):
        super(IoTBackend, self).__init__()
@@ -387,6 +437,7 @@ class IoTBackend(BaseBackend):
        self.policies = OrderedDict()
        self.principal_policies = OrderedDict()
        self.principal_things = OrderedDict()
+        self.endpoint = None

    def reset(self):
        region_name = self.region_name
@@ -495,6 +546,10 @@ class IoTBackend(BaseBackend):
            raise ResourceNotFoundException()
        return thing_types[0]

+    def describe_endpoint(self, endpoint_type):
+        self.endpoint = FakeEndpoint(endpoint_type, self.region_name)
+        return self.endpoint
+
    def delete_thing(self, thing_name, expected_version):
        # TODO: handle expected_version

@@ -625,6 +680,11 @@ class IoTBackend(BaseBackend):
        self.certificates[certificate.certificate_id] = certificate
        return certificate

+    def register_certificate_without_ca(self, certificate_pem, status):
+        certificate = FakeCertificate(certificate_pem, status, self.region_name)
+        self.certificates[certificate.certificate_id] = certificate
+        return certificate
+
    def update_certificate(self, certificate_id, new_status):
        cert = self.describe_certificate(certificate_id)
        # TODO: validate new_status
@@ -88,6 +88,11 @@ class IoTResponse(BaseResponse):
        )
        return json.dumps(thing_type.to_dict())

+    def describe_endpoint(self):
+        endpoint_type = self._get_param("endpointType")
+        endpoint = self.iot_backend.describe_endpoint(endpoint_type=endpoint_type)
+        return json.dumps(endpoint.to_dict())
+
    def delete_thing(self):
        thing_name = self._get_param("thingName")
        expected_version = self._get_param("expectedVersion")
@@ -330,6 +335,17 @@ class IoTResponse(BaseResponse):
            dict(certificateId=cert.certificate_id, certificateArn=cert.arn)
        )

+    def register_certificate_without_ca(self):
+        certificate_pem = self._get_param("certificatePem")
+        status = self._get_param("status")
+
+        cert = self.iot_backend.register_certificate_without_ca(
+            certificate_pem=certificate_pem, status=status,
+        )
+        return json.dumps(
+            dict(certificateId=cert.certificate_id, certificateArn=cert.arn)
+        )
+
    def update_certificate(self):
        certificate_id = self._get_param("certificateId")
        new_status = self._get_param("newStatus")
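A quick sketch of the new `DescribeEndpoint` support through boto3; the region is illustrative, and the assertion only relies on the endpoint formats constructed in `FakeEndpoint` above:

```python
import boto3
from moto import mock_iot


@mock_iot
def test_describe_endpoint():
    client = boto3.client("iot", region_name="ap-northeast-1")
    resp = client.describe_endpoint(endpointType="iot:Data-ATS")
    # FakeEndpoint builds "<14 random chars>-ats.iot.<region>.amazonaws.com"
    assert resp["endpointAddress"].endswith("-ats.iot.ap-northeast-1.amazonaws.com")
```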
@@ -135,7 +135,7 @@ class Shard(BaseModel):


class Stream(CloudFormationModel):
-    def __init__(self, stream_name, shard_count, region_name):
+    def __init__(self, stream_name, shard_count, retention_period_hours, region_name):
        self.stream_name = stream_name
        self.creation_datetime = datetime.datetime.now()
        self.region = region_name
@@ -145,6 +145,7 @@ class Stream(CloudFormationModel):
        self.status = "ACTIVE"
        self.shard_count = None
        self.update_shard_count(shard_count)
+        self.retention_period_hours = retention_period_hours

    def update_shard_count(self, shard_count):
        # ToDo: This was extracted from init.  It's only accurate for new streams.
@@ -213,6 +214,7 @@ class Stream(CloudFormationModel):
                "StreamName": self.stream_name,
                "StreamStatus": self.status,
                "HasMoreShards": False,
+                "RetentionPeriodHours": self.retention_period_hours,
                "Shards": [shard.to_json() for shard in self.shards.values()],
            }
        }
@@ -243,9 +245,19 @@ class Stream(CloudFormationModel):
    ):
        properties = cloudformation_json.get("Properties", {})
        shard_count = properties.get("ShardCount", 1)
-        name = properties.get("Name", resource_name)
+        retention_period_hours = properties.get("RetentionPeriodHours", resource_name)
+        tags = {
+            tag_item["Key"]: tag_item["Value"]
+            for tag_item in properties.get("Tags", [])
+        }

        backend = kinesis_backends[region_name]
-        return backend.create_stream(name, shard_count, region_name)
+        stream = backend.create_stream(
+            resource_name, shard_count, retention_period_hours, region_name
+        )
+        if any(tags):
+            backend.add_tags_to_stream(stream.stream_name, tags)
+        return stream

    @classmethod
    def update_from_cloudformation_json(
@@ -269,6 +281,15 @@ class Stream(CloudFormationModel):
        else:  # No Interruption
            if "ShardCount" in properties:
                original_resource.update_shard_count(properties["ShardCount"])
+            if "RetentionPeriodHours" in properties:
+                original_resource.retention_period_hours = properties[
+                    "RetentionPeriodHours"
+                ]
+            if "Tags" in properties:
+                original_resource.tags = {
+                    tag_item["Key"]: tag_item["Value"]
+                    for tag_item in properties.get("Tags", [])
+                }
            return original_resource

    @classmethod
@@ -276,9 +297,7 @@ class Stream(CloudFormationModel):
        cls, resource_name, cloudformation_json, region_name
    ):
        backend = kinesis_backends[region_name]
-        properties = cloudformation_json.get("Properties", {})
-        stream_name = properties.get(cls.cloudformation_name_type(), resource_name)
-        backend.delete_stream(stream_name)
+        backend.delete_stream(resource_name)

    @staticmethod
    def is_replacement_update(properties):
@@ -398,10 +417,12 @@ class KinesisBackend(BaseBackend):
        self.streams = OrderedDict()
        self.delivery_streams = {}

-    def create_stream(self, stream_name, shard_count, region_name):
+    def create_stream(
+        self, stream_name, shard_count, retention_period_hours, region_name
+    ):
        if stream_name in self.streams:
            raise ResourceInUseError(stream_name)
-        stream = Stream(stream_name, shard_count, region_name)
+        stream = Stream(stream_name, shard_count, retention_period_hours, region_name)
        self.streams[stream_name] = stream
        return stream
@@ -25,7 +25,10 @@ class KinesisResponse(BaseResponse):
    def create_stream(self):
        stream_name = self.parameters.get("StreamName")
        shard_count = self.parameters.get("ShardCount")
-        self.kinesis_backend.create_stream(stream_name, shard_count, self.region)
+        retention_period_hours = self.parameters.get("RetentionPeriodHours")
+        self.kinesis_backend.create_stream(
+            stream_name, shard_count, retention_period_hours, self.region
+        )
        return ""

    def describe_stream(self):
@@ -2,7 +2,8 @@ from __future__ import unicode_literals
from .responses import KinesisResponse

url_bases = [
-    "https?://kinesis.(.+).amazonaws.com",
+    # Need to avoid conflicting with kinesisvideo
+    r"https?://kinesis\.(.+).amazonaws.com",
    "https?://firehose.(.+).amazonaws.com",
]
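With retention threaded through `Stream`, `KinesisBackend.create_stream`, and the response handler, the `DescribeStream` output now carries `RetentionPeriodHours`. A small sketch (the stream name is illustrative; the backend reads the parameter optionally, so a plain `CreateStream` call simply leaves it unset):

```python
import boto3
from moto import mock_kinesis


@mock_kinesis
def test_describe_stream_reports_retention():
    client = boto3.client("kinesis", region_name="us-east-1")
    client.create_stream(StreamName="events", ShardCount=1)
    description = client.describe_stream(StreamName="events")["StreamDescription"]
    # The key is now part of the description payload.
    assert "RetentionPeriodHours" in description
```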
moto/kinesisvideo/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from __future__ import unicode_literals
from .models import kinesisvideo_backends
from ..core.models import base_decorator

kinesisvideo_backend = kinesisvideo_backends["us-east-1"]
mock_kinesisvideo = base_decorator(kinesisvideo_backends)

moto/kinesisvideo/exceptions.py (new file, 24 lines)
@@ -0,0 +1,24 @@
from __future__ import unicode_literals

from moto.core.exceptions import RESTError


class KinesisvideoClientError(RESTError):
    code = 400


class ResourceNotFoundException(KinesisvideoClientError):
    def __init__(self):
        self.code = 404
        super(ResourceNotFoundException, self).__init__(
            "ResourceNotFoundException",
            "The requested stream is not found or not active.",
        )


class ResourceInUseException(KinesisvideoClientError):
    def __init__(self, message):
        self.code = 400
        super(ResourceInUseException, self).__init__(
            "ResourceInUseException", message,
        )

moto/kinesisvideo/models.py (new file, 147 lines)
@@ -0,0 +1,147 @@
from __future__ import unicode_literals
from boto3 import Session
from moto.core import BaseBackend, BaseModel
from datetime import datetime
from .exceptions import (
    ResourceNotFoundException,
    ResourceInUseException,
)
import random
import string
from moto.core.utils import get_random_hex
from moto.core import ACCOUNT_ID


class Stream(BaseModel):
    def __init__(
        self,
        region_name,
        device_name,
        stream_name,
        media_type,
        kms_key_id,
        data_retention_in_hours,
        tags,
    ):
        self.region_name = region_name
        self.stream_name = stream_name
        self.device_name = device_name
        self.media_type = media_type
        self.kms_key_id = kms_key_id
        self.data_retention_in_hours = data_retention_in_hours
        self.tags = tags
        self.status = "ACTIVE"
        self.version = self._get_random_string()
        self.creation_time = datetime.utcnow()
        stream_arn = "arn:aws:kinesisvideo:{}:{}:stream/{}/1598784211076".format(
            self.region_name, ACCOUNT_ID, self.stream_name
        )
        self.data_endpoint_number = get_random_hex()
        self.arn = stream_arn

    def _get_random_string(self, length=20):
        letters = string.ascii_lowercase
        result_str = "".join([random.choice(letters) for _ in range(length)])
        return result_str

    def get_data_endpoint(self, api_name):
        data_endpoint_prefix = "s-" if api_name in ("PUT_MEDIA", "GET_MEDIA") else "b-"
        return "https://{}{}.kinesisvideo.{}.amazonaws.com".format(
            data_endpoint_prefix, self.data_endpoint_number, self.region_name
        )

    def to_dict(self):
        return {
            "DeviceName": self.device_name,
            "StreamName": self.stream_name,
            "StreamARN": self.arn,
            "MediaType": self.media_type,
            "KmsKeyId": self.kms_key_id,
            "Version": self.version,
            "Status": self.status,
            "CreationTime": self.creation_time.isoformat(),
            "DataRetentionInHours": self.data_retention_in_hours,
        }


class KinesisVideoBackend(BaseBackend):
    def __init__(self, region_name=None):
        super(KinesisVideoBackend, self).__init__()
        self.region_name = region_name
        self.streams = {}

    def reset(self):
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)

    def create_stream(
        self,
        device_name,
        stream_name,
        media_type,
        kms_key_id,
        data_retention_in_hours,
        tags,
    ):
        streams = [_ for _ in self.streams.values() if _.stream_name == stream_name]
        if len(streams) > 0:
            raise ResourceInUseException(
                "The stream {} already exists.".format(stream_name)
            )
        stream = Stream(
            self.region_name,
            device_name,
            stream_name,
            media_type,
            kms_key_id,
            data_retention_in_hours,
            tags,
        )
        self.streams[stream.arn] = stream
        return stream.arn

    def _get_stream(self, stream_name, stream_arn):
        if stream_name:
            streams = [_ for _ in self.streams.values() if _.stream_name == stream_name]
            if len(streams) == 0:
                raise ResourceNotFoundException()
            stream = streams[0]
        elif stream_arn:
            stream = self.streams.get(stream_arn)
            if stream is None:
                raise ResourceNotFoundException()
        return stream

    def describe_stream(self, stream_name, stream_arn):
        stream = self._get_stream(stream_name, stream_arn)
        stream_info = stream.to_dict()
        return stream_info

    def list_streams(self, max_results, next_token, stream_name_condition):
        stream_info_list = [_.to_dict() for _ in self.streams.values()]
        next_token = None
        return stream_info_list, next_token

    def delete_stream(self, stream_arn, current_version):
        stream = self.streams.get(stream_arn)
        if stream is None:
            raise ResourceNotFoundException()
        del self.streams[stream_arn]

    def get_data_endpoint(self, stream_name, stream_arn, api_name):
        stream = self._get_stream(stream_name, stream_arn)
        return stream.get_data_endpoint(api_name)

    # add methods from here


kinesisvideo_backends = {}
for region in Session().get_available_regions("kinesisvideo"):
    kinesisvideo_backends[region] = KinesisVideoBackend(region)
for region in Session().get_available_regions(
    "kinesisvideo", partition_name="aws-us-gov"
):
    kinesisvideo_backends[region] = KinesisVideoBackend(region)
for region in Session().get_available_regions("kinesisvideo", partition_name="aws-cn"):
    kinesisvideo_backends[region] = KinesisVideoBackend(region)

moto/kinesisvideo/responses.py (new file, 65 lines)
@@ -0,0 +1,65 @@
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import kinesisvideo_backends
import json


class KinesisVideoResponse(BaseResponse):
    SERVICE_NAME = "kinesisvideo"

    @property
    def kinesisvideo_backend(self):
        return kinesisvideo_backends[self.region]

    def create_stream(self):
        device_name = self._get_param("DeviceName")
        stream_name = self._get_param("StreamName")
        media_type = self._get_param("MediaType")
        kms_key_id = self._get_param("KmsKeyId")
        data_retention_in_hours = self._get_int_param("DataRetentionInHours")
        tags = self._get_param("Tags")
        stream_arn = self.kinesisvideo_backend.create_stream(
            device_name=device_name,
            stream_name=stream_name,
            media_type=media_type,
            kms_key_id=kms_key_id,
            data_retention_in_hours=data_retention_in_hours,
            tags=tags,
        )
        return json.dumps(dict(StreamARN=stream_arn))

    def describe_stream(self):
        stream_name = self._get_param("StreamName")
        stream_arn = self._get_param("StreamARN")
        stream_info = self.kinesisvideo_backend.describe_stream(
            stream_name=stream_name, stream_arn=stream_arn,
        )
        return json.dumps(dict(StreamInfo=stream_info))

    def list_streams(self):
        max_results = self._get_int_param("MaxResults")
        next_token = self._get_param("NextToken")
        stream_name_condition = self._get_param("StreamNameCondition")
        stream_info_list, next_token = self.kinesisvideo_backend.list_streams(
            max_results=max_results,
            next_token=next_token,
            stream_name_condition=stream_name_condition,
        )
        return json.dumps(dict(StreamInfoList=stream_info_list, NextToken=next_token))

    def delete_stream(self):
        stream_arn = self._get_param("StreamARN")
        current_version = self._get_param("CurrentVersion")
        self.kinesisvideo_backend.delete_stream(
            stream_arn=stream_arn, current_version=current_version,
        )
        return json.dumps(dict())

    def get_data_endpoint(self):
        stream_name = self._get_param("StreamName")
        stream_arn = self._get_param("StreamARN")
        api_name = self._get_param("APIName")
        data_endpoint = self.kinesisvideo_backend.get_data_endpoint(
            stream_name=stream_name, stream_arn=stream_arn, api_name=api_name,
        )
        return json.dumps(dict(DataEndpoint=data_endpoint))

moto/kinesisvideo/urls.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from __future__ import unicode_literals
from .responses import KinesisVideoResponse

url_bases = [
    "https?://kinesisvideo.(.+).amazonaws.com",
]


response = KinesisVideoResponse()


url_paths = {
    "{0}/createStream$": response.dispatch,
    "{0}/describeStream$": response.dispatch,
    "{0}/deleteStream$": response.dispatch,
    "{0}/listStreams$": response.dispatch,
    "{0}/getDataEndpoint$": response.dispatch,
}
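The new service wires five operations end to end. A quick sketch of the lifecycle through boto3 (stream name and region are illustrative):

```python
import boto3
from moto import mock_kinesisvideo


@mock_kinesisvideo
def test_kinesis_video_stream_lifecycle():
    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
    arn = client.create_stream(
        StreamName="my-stream", MediaType="video/h264", DataRetentionInHours=24
    )["StreamARN"]
    info = client.describe_stream(StreamARN=arn)["StreamInfo"]
    assert info["StreamName"] == "my-stream"
    # PUT_MEDIA/GET_MEDIA get an "s-" data endpoint, every other API "b-".
    endpoint = client.get_data_endpoint(StreamARN=arn, APIName="GET_MEDIA")
    assert endpoint["DataEndpoint"].startswith("https://s-")
    client.delete_stream(StreamARN=arn)
```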
moto/kinesisvideoarchivedmedia/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from __future__ import unicode_literals
from .models import kinesisvideoarchivedmedia_backends
from ..core.models import base_decorator

kinesisvideoarchivedmedia_backend = kinesisvideoarchivedmedia_backends["us-east-1"]
mock_kinesisvideoarchivedmedia = base_decorator(kinesisvideoarchivedmedia_backends)

moto/kinesisvideoarchivedmedia/exceptions.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from __future__ import unicode_literals

# Not implemented exceptions for now

moto/kinesisvideoarchivedmedia/models.py (new file, 88 lines)
@@ -0,0 +1,88 @@
from __future__ import unicode_literals
from boto3 import Session
from moto.core import BaseBackend
from moto.kinesisvideo import kinesisvideo_backends
from moto.sts.utils import random_session_token


class KinesisVideoArchivedMediaBackend(BaseBackend):
    def __init__(self, region_name=None):
        super(KinesisVideoArchivedMediaBackend, self).__init__()
        self.region_name = region_name

    def reset(self):
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)

    def _get_streaming_url(self, stream_name, stream_arn, api_name):
        stream = kinesisvideo_backends[self.region_name]._get_stream(
            stream_name, stream_arn
        )
        data_endpoint = stream.get_data_endpoint(api_name)
        session_token = random_session_token()
        api_to_relative_path = {
            "GET_HLS_STREAMING_SESSION_URL": "/hls/v1/getHLSMasterPlaylist.m3u8",
            "GET_DASH_STREAMING_SESSION_URL": "/dash/v1/getDASHManifest.mpd",
        }
        relative_path = api_to_relative_path[api_name]
        url = "{}{}?SessionToken={}".format(data_endpoint, relative_path, session_token)
        return url

    def get_hls_streaming_session_url(
        self,
        stream_name,
        stream_arn,
        playback_mode,
        hls_fragment_selector,
        container_format,
        discontinuity_mode,
        display_fragment_timestamp,
        expires,
        max_media_playlist_fragment_results,
    ):
        # Ignore optional parameters as the format of hls_url doesn't depend on them
        api_name = "GET_HLS_STREAMING_SESSION_URL"
        url = self._get_streaming_url(stream_name, stream_arn, api_name)
        return url

    def get_dash_streaming_session_url(
        self,
        stream_name,
        stream_arn,
        playback_mode,
        display_fragment_timestamp,
        display_fragment_number,
        dash_fragment_selector,
        expires,
        max_manifest_fragment_results,
    ):
        # Ignore optional parameters as the format of the URL doesn't depend on them
        api_name = "GET_DASH_STREAMING_SESSION_URL"
        url = self._get_streaming_url(stream_name, stream_arn, api_name)
        return url

    def get_clip(self, stream_name, stream_arn, clip_fragment_selector):
        kinesisvideo_backends[self.region_name]._get_stream(stream_name, stream_arn)
        content_type = "video/mp4"  # Fixed content_type as it depends on input stream
        payload = b"sample-mp4-video"
        return content_type, payload


kinesisvideoarchivedmedia_backends = {}
for region in Session().get_available_regions("kinesis-video-archived-media"):
    kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend(
        region
    )
for region in Session().get_available_regions(
    "kinesis-video-archived-media", partition_name="aws-us-gov"
):
    kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend(
        region
    )
for region in Session().get_available_regions(
    "kinesis-video-archived-media", partition_name="aws-cn"
):
    kinesisvideoarchivedmedia_backends[region] = KinesisVideoArchivedMediaBackend(
        region
    )

moto/kinesisvideoarchivedmedia/responses.py (new file, 70 lines)
@@ -0,0 +1,70 @@
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import kinesisvideoarchivedmedia_backends
import json


class KinesisVideoArchivedMediaResponse(BaseResponse):
    SERVICE_NAME = "kinesis-video-archived-media"

    @property
    def kinesisvideoarchivedmedia_backend(self):
        return kinesisvideoarchivedmedia_backends[self.region]

    def get_hls_streaming_session_url(self):
        stream_name = self._get_param("StreamName")
        stream_arn = self._get_param("StreamARN")
        playback_mode = self._get_param("PlaybackMode")
        hls_fragment_selector = self._get_param("HLSFragmentSelector")
        container_format = self._get_param("ContainerFormat")
        discontinuity_mode = self._get_param("DiscontinuityMode")
        display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
        expires = self._get_int_param("Expires")
        max_media_playlist_fragment_results = self._get_param(
            "MaxMediaPlaylistFragmentResults"
        )
        hls_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url(
            stream_name=stream_name,
            stream_arn=stream_arn,
            playback_mode=playback_mode,
            hls_fragment_selector=hls_fragment_selector,
            container_format=container_format,
            discontinuity_mode=discontinuity_mode,
            display_fragment_timestamp=display_fragment_timestamp,
            expires=expires,
            max_media_playlist_fragment_results=max_media_playlist_fragment_results,
        )
        return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url))

    def get_dash_streaming_session_url(self):
        stream_name = self._get_param("StreamName")
        stream_arn = self._get_param("StreamARN")
        playback_mode = self._get_param("PlaybackMode")
        display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
        display_fragment_number = self._get_param("DisplayFragmentNumber")
        dash_fragment_selector = self._get_param("DASHFragmentSelector")
        expires = self._get_int_param("Expires")
        max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults")
        dash_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url(
            stream_name=stream_name,
            stream_arn=stream_arn,
            playback_mode=playback_mode,
            display_fragment_timestamp=display_fragment_timestamp,
            display_fragment_number=display_fragment_number,
            dash_fragment_selector=dash_fragment_selector,
            expires=expires,
            max_manifest_fragment_results=max_manifest_fragment_results,
        )
        return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url))

    def get_clip(self):
        stream_name = self._get_param("StreamName")
        stream_arn = self._get_param("StreamARN")
        clip_fragment_selector = self._get_param("ClipFragmentSelector")
        content_type, payload = self.kinesisvideoarchivedmedia_backend.get_clip(
            stream_name=stream_name,
            stream_arn=stream_arn,
            clip_fragment_selector=clip_fragment_selector,
        )
        new_headers = {"Content-Type": content_type}
        return payload, new_headers

moto/kinesisvideoarchivedmedia/urls.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from __future__ import unicode_literals
from .responses import KinesisVideoArchivedMediaResponse

url_bases = [
    r"https?://.*\.kinesisvideo.(.+).amazonaws.com",
]


response = KinesisVideoArchivedMediaResponse()


url_paths = {
    "{0}/.*$": response.dispatch,
}
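As with the real service, the archived-media client is pointed at the data endpoint returned by `GetDataEndpoint`; the URL base regex above is written to match those per-stream hosts. A sketch (region and stream name illustrative):

```python
import boto3
from moto import mock_kinesisvideo, mock_kinesisvideoarchivedmedia


@mock_kinesisvideo
@mock_kinesisvideoarchivedmedia
def test_get_hls_streaming_session_url():
    region = "ap-northeast-1"
    kv = boto3.client("kinesisvideo", region_name=region)
    kv.create_stream(StreamName="my-stream", MediaType="video/h264")
    endpoint = kv.get_data_endpoint(
        StreamName="my-stream", APIName="GET_HLS_STREAMING_SESSION_URL"
    )["DataEndpoint"]
    am = boto3.client(
        "kinesis-video-archived-media", region_name=region, endpoint_url=endpoint
    )
    url = am.get_hls_streaming_session_url(StreamName="my-stream")
    assert "/hls/v1/getHLSMasterPlaylist.m3u8" in url["HLSStreamingSessionURL"]
```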
@@ -4,7 +4,6 @@ import boto.rds
from jinja2 import Template

from moto.core import BaseBackend, CloudFormationModel
-from moto.core.utils import get_random_hex
from moto.ec2.models import ec2_backends
from moto.rds.exceptions import UnformattedGetAttTemplateException
from moto.rds2.models import rds2_backends
@@ -33,9 +32,6 @@ class Database(CloudFormationModel):
    ):
        properties = cloudformation_json["Properties"]

-        db_instance_identifier = properties.get(cls.cloudformation_name_type())
-        if not db_instance_identifier:
-            db_instance_identifier = resource_name.lower() + get_random_hex(12)
        db_security_groups = properties.get("DBSecurityGroups")
        if not db_security_groups:
            db_security_groups = []
@@ -48,7 +44,7 @@ class Database(CloudFormationModel):
            "availability_zone": properties.get("AvailabilityZone"),
            "backup_retention_period": properties.get("BackupRetentionPeriod"),
            "db_instance_class": properties.get("DBInstanceClass"),
-            "db_instance_identifier": db_instance_identifier,
+            "db_instance_identifier": resource_name,
            "db_name": properties.get("DBName"),
            "db_subnet_group_name": db_subnet_group_name,
            "engine": properties.get("Engine"),
@@ -229,7 +225,7 @@ class SecurityGroup(CloudFormationModel):
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
-        group_name = resource_name.lower() + get_random_hex(12)
+        group_name = resource_name.lower()
        description = properties["GroupDescription"]
        security_group_ingress_rules = properties.get("DBSecurityGroupIngress", [])
        tags = properties.get("Tags")
@@ -303,9 +299,7 @@ class SubnetGroup(CloudFormationModel):
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
-        subnet_name = properties.get(cls.cloudformation_name_type())
-        if not subnet_name:
-            subnet_name = resource_name.lower() + get_random_hex(12)
+        subnet_name = resource_name.lower()
        description = properties["DBSubnetGroupDescription"]
        subnet_ids = properties["SubnetIds"]
        tags = properties.get("Tags")
@@ -10,7 +10,6 @@ from jinja2 import Template
from re import compile as re_compile
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel, CloudFormationModel
-from moto.core.utils import get_random_hex
from moto.core.utils import iso_8601_datetime_with_milliseconds
from moto.ec2.models import ec2_backends
from .exceptions import (
@@ -371,9 +370,6 @@ class Database(CloudFormationModel):
    ):
        properties = cloudformation_json["Properties"]

-        db_instance_identifier = properties.get(cls.cloudformation_name_type())
-        if not db_instance_identifier:
-            db_instance_identifier = resource_name.lower() + get_random_hex(12)
        db_security_groups = properties.get("DBSecurityGroups")
        if not db_security_groups:
            db_security_groups = []
@@ -386,7 +382,7 @@ class Database(CloudFormationModel):
            "availability_zone": properties.get("AvailabilityZone"),
            "backup_retention_period": properties.get("BackupRetentionPeriod"),
            "db_instance_class": properties.get("DBInstanceClass"),
-            "db_instance_identifier": db_instance_identifier,
+            "db_instance_identifier": resource_name,
            "db_name": properties.get("DBName"),
            "db_subnet_group_name": db_subnet_group_name,
            "engine": properties.get("Engine"),
@@ -650,7 +646,7 @@ class SecurityGroup(CloudFormationModel):
        cls, resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
-        group_name = resource_name.lower() + get_random_hex(12)
+        group_name = resource_name.lower()
        description = properties["GroupDescription"]
        security_group_ingress_rules = properties.get("DBSecurityGroupIngress", [])
        tags = properties.get("Tags")
@@ -759,9 +755,6 @@ class SubnetGroup(CloudFormationModel):
    ):
        properties = cloudformation_json["Properties"]

-        subnet_name = properties.get(cls.cloudformation_name_type())
-        if not subnet_name:
-            subnet_name = resource_name.lower() + get_random_hex(12)
        description = properties["DBSubnetGroupDescription"]
        subnet_ids = properties["SubnetIds"]
        tags = properties.get("Tags")
@@ -770,7 +763,7 @@ class SubnetGroup(CloudFormationModel):
        subnets = [ec2_backend.get_subnet(subnet_id) for subnet_id in subnet_ids]
        rds2_backend = rds2_backends[region_name]
        subnet_group = rds2_backend.create_subnet_group(
-            subnet_name, description, subnets, tags
+            resource_name, description, subnets, tags
        )
        return subnet_group
@@ -298,10 +298,9 @@ class FakeZone(CloudFormationModel):
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
-        name = properties["Name"]
-
-        hosted_zone = route53_backend.create_hosted_zone(name, private_zone=False)
+        hosted_zone = route53_backend.create_hosted_zone(
+            resource_name, private_zone=False
+        )
        return hosted_zone
@@ -1086,7 +1086,7 @@ class FakeBucket(CloudFormationModel):
    ):
        bucket = s3_backend.create_bucket(resource_name, region_name)

-        properties = cloudformation_json["Properties"]
+        properties = cloudformation_json.get("Properties", {})

        if "BucketEncryption" in properties:
            bucket_encryption = cfn_to_api_encryption(properties["BucketEncryption"])
@@ -1129,9 +1129,7 @@ class FakeBucket(CloudFormationModel):
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
-        bucket_name = properties[cls.cloudformation_name_type()]
-        s3_backend.delete_bucket(bucket_name)
+        s3_backend.delete_bucket(resource_name)

    def to_config_dict(self):
        """Return the AWS Config JSON format of this S3 bucket.
@@ -860,6 +860,10 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):

        new_key = self.backend.set_object(bucket_name, key, f)

+        if form.get("acl"):
+            acl = get_canned_acl(form.get("acl"))
+            new_key.set_acl(acl)
+
        # Metadata
        metadata = metadata_from_headers(form)
        new_key.set_metadata(metadata)
@@ -1092,6 +1096,11 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        else:
            # Flask server
            body = request.data
+            # when the data is being passed as a file
+            if request.files and not body:
+                for _, value in request.files.items():
+                    body = value.stream.read()

        if body is None:
            body = b""
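The first hunk lets browser-style form POST uploads set a canned ACL; the second reads the body from `request.files` when a client sends it as a multipart file. A sketch of the first path, assuming `requests.post` is intercepted while the mock is active (moto patches outbound HTTP) and that the canned ACL can be read back via `GetObjectAcl`:

```python
import boto3
import requests
from moto import mock_s3


@mock_s3
def test_post_upload_sets_canned_acl():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="uploads")
    post = s3.generate_presigned_post(
        Bucket="uploads",
        Key="file.txt",
        Fields={"acl": "public-read"},
        Conditions=[{"acl": "public-read"}],
    )
    # The form fields include "acl", which the response handler now applies
    # to the freshly stored key via get_canned_acl().
    requests.post(
        post["url"], data=post["fields"], files={"file": ("file.txt", b"hello")}
    )
    grants = s3.get_object_acl(Bucket="uploads", Key="file.txt")["Grants"]
    assert any(g["Grantee"].get("URI", "").endswith("AllUsers") for g in grants)
```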
@@ -104,7 +104,7 @@ class Topic(CloudFormationModel):
        sns_backend = sns_backends[region_name]
        properties = cloudformation_json["Properties"]

-        topic = sns_backend.create_topic(properties.get(cls.cloudformation_name_type()))
+        topic = sns_backend.create_topic(resource_name)
        for subscription in properties.get("Subscription", []):
            sns_backend.subscribe(
                topic.arn, subscription["Endpoint"], subscription["Protocol"]
@@ -374,10 +374,7 @@ class Queue(CloudFormationModel):

        sqs_backend = sqs_backends[region_name]
        return sqs_backend.create_queue(
-            name=properties["QueueName"],
-            tags=tags_dict,
-            region=region_name,
-            **properties
+            name=resource_name, tags=tags_dict, region=region_name, **properties
        )

    @classmethod
@@ -385,7 +382,7 @@ class Queue(CloudFormationModel):
        cls, original_resource, new_resource_name, cloudformation_json, region_name
    ):
        properties = cloudformation_json["Properties"]
-        queue_name = properties["QueueName"]
+        queue_name = original_resource.name

        sqs_backend = sqs_backends[region_name]
        queue = sqs_backend.get_queue(queue_name)
@@ -402,10 +399,8 @@ class Queue(CloudFormationModel):
    def delete_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
-        properties = cloudformation_json["Properties"]
-        queue_name = properties["QueueName"]
        sqs_backend = sqs_backends[region_name]
-        sqs_backend.delete_queue(queue_name)
+        sqs_backend.delete_queue(resource_name)

    @property
    def approximate_number_of_messages_delayed(self):
@@ -631,7 +626,8 @@ class SQSBackend(BaseBackend):
            attributes = queue.attributes
        else:
            for name in (name for name in attribute_names if name in queue.attributes):
-                attributes[name] = queue.attributes.get(name)
+                if queue.attributes.get(name) is not None:
+                    attributes[name] = queue.attributes.get(name)

        return attributes
@@ -70,7 +70,10 @@ class SQSResponse(BaseResponse):
     def call_action(self):
         status_code, headers, body = super(SQSResponse, self).call_action()
         if status_code == 404:
-            return 404, headers, ERROR_INEXISTENT_QUEUE
+            queue_name = self.querystring.get("QueueName", [""])[0]
+            template = self.response_template(ERROR_INEXISTENT_QUEUE)
+            response = template.render(queue_name=queue_name)
+            return 404, headers, response
         return status_code, headers, body

    def _error(self, code, message, status=400):
@@ -487,10 +490,12 @@ DELETE_QUEUE_RESPONSE = """<DeleteQueueResponse>
 GET_QUEUE_ATTRIBUTES_RESPONSE = """<GetQueueAttributesResponse>
   <GetQueueAttributesResult>
   {% for key, value in attributes.items() %}
-    <Attribute>
-      <Name>{{ key }}</Name>
-      <Value>{{ value }}</Value>
-    </Attribute>
+    {% if value is not none %}
+    <Attribute>
+      <Name>{{ key }}</Name>
+      <Value>{{ value }}</Value>
+    </Attribute>
+    {% endif %}
   {% endfor %}
   </GetQueueAttributesResult>
   <ResponseMetadata>
@@ -718,7 +723,11 @@ ERROR_INEXISTENT_QUEUE = """<ErrorResponse xmlns="http://queue.amazonaws.com/doc
     <Error>
        <Type>Sender</Type>
        <Code>AWS.SimpleQueueService.NonExistentQueue</Code>
-       <Message>The specified queue does not exist for this wsdl version.</Message>
+       {% if queue_name %}
+       <Message>The specified queue {{queue_name}} does not exist for this wsdl version.</Message>
+       {% else %}
+       <Message>The specified queue does not exist for this wsdl version.</Message>
+       {% endif %}
        <Detail/>
     </Error>
     <RequestId>b8bc806b-fa6b-53b5-8be8-cfa2f9836bc3</RequestId>
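Note on the two template changes above: attributes whose value is `None` are now omitted from `GetQueueAttributes` responses, and the 404 error body can name the queue that was requested. A minimal sketch of the `None`-filtering behaviour, using an abbreviated copy of the attribute loop with plain Jinja2 (not moto's own rendering pipeline):

```python
from jinja2 import Template

# Abbreviated copy of the attribute loop from GET_QUEUE_ATTRIBUTES_RESPONSE.
template = Template(
    "{% for key, value in attributes.items() %}"
    "{% if value is not none %}"
    "<Attribute><Name>{{ key }}</Name><Value>{{ value }}</Value></Attribute>"
    "{% endif %}"
    "{% endfor %}"
)

# DelaySeconds is None, so it is skipped in the rendered XML.
print(template.render(attributes={"DelaySeconds": None, "VisibilityTimeout": 30}))
# <Attribute><Name>VisibilityTimeout</Name><Value>30</Value></Attribute>
```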
@@ -18,6 +18,11 @@ class AWSError(Exception):
         )


+class ExecutionAlreadyExists(AWSError):
+    TYPE = "ExecutionAlreadyExists"
+    STATUS = 400
+
+
 class ExecutionDoesNotExist(AWSError):
     TYPE = "ExecutionDoesNotExist"
     STATUS = 400
@@ -33,6 +38,11 @@ class InvalidName(AWSError):
     STATUS = 400


+class InvalidExecutionInput(AWSError):
+    TYPE = "InvalidExecutionInput"
+    STATUS = 400
+
+
 class StateMachineDoesNotExist(AWSError):
     TYPE = "StateMachineDoesNotExist"
     STATUS = 400
@@ -1,3 +1,4 @@
+import json
 import re
 from datetime import datetime

@@ -8,8 +9,10 @@ from moto.core.utils import iso_8601_datetime_without_milliseconds
 from moto.sts.models import ACCOUNT_ID
 from uuid import uuid4
 from .exceptions import (
+    ExecutionAlreadyExists,
     ExecutionDoesNotExist,
     InvalidArn,
+    InvalidExecutionInput,
     InvalidName,
     StateMachineDoesNotExist,
 )
@@ -33,6 +36,7 @@ class Execution:
         state_machine_name,
         execution_name,
         state_machine_arn,
+        execution_input,
     ):
         execution_arn = "arn:aws:states:{}:{}:execution:{}:{}"
         execution_arn = execution_arn.format(
@@ -42,6 +46,7 @@ class Execution:
         self.name = execution_name
         self.start_date = iso_8601_datetime_without_milliseconds(datetime.now())
         self.state_machine_arn = state_machine_arn
+        self.execution_input = execution_input
         self.status = "RUNNING"
         self.stop_date = None

@@ -203,14 +208,17 @@ class StepFunctionBackend(BaseBackend):
         if sm:
             self.state_machines.remove(sm)

-    def start_execution(self, state_machine_arn, name=None):
+    def start_execution(self, state_machine_arn, name=None, execution_input=None):
         state_machine_name = self.describe_state_machine(state_machine_arn).name
+        self._ensure_execution_name_doesnt_exist(name)
+        self._validate_execution_input(execution_input)
         execution = Execution(
             region_name=self.region_name,
             account_id=self._get_account_id(),
             state_machine_name=state_machine_name,
             execution_name=name or str(uuid4()),
             state_machine_arn=state_machine_arn,
+            execution_input=execution_input,
         )
         self.executions.append(execution)
         return execution
@@ -278,6 +286,21 @@ class StepFunctionBackend(BaseBackend):
         if not arn or not match:
             raise InvalidArn(invalid_msg)

+    def _ensure_execution_name_doesnt_exist(self, name):
+        for execution in self.executions:
+            if execution.name == name:
+                raise ExecutionAlreadyExists(
+                    "Execution Already Exists: '" + execution.execution_arn + "'"
+                )
+
+    def _validate_execution_input(self, execution_input):
+        try:
+            json.loads(execution_input)
+        except Exception as ex:
+            raise InvalidExecutionInput(
+                "Invalid State Machine Execution Input: '" + str(ex) + "'"
+            )
+
     def _get_account_id(self):
         return ACCOUNT_ID

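Taken together with the response changes below, these model changes let the mocked backend store, validate, and echo back execution input. A quick sketch of the resulting behaviour through boto3 (the state-machine definition and role ARN are placeholder values; moto does not execute the definition):

```python
import json

import boto3
from moto import mock_stepfunctions

# Placeholder definition: a single Succeed state.
DEFINITION = json.dumps({"StartAt": "Done", "States": {"Done": {"Type": "Succeed"}}})


@mock_stepfunctions
def demo_execution_input():
    client = boto3.client("stepfunctions", region_name="us-east-1")
    sm = client.create_state_machine(
        name="example",
        definition=DEFINITION,
        roleArn="arn:aws:iam::123456789012:role/sf-role",  # placeholder role
    )
    execution = client.start_execution(
        stateMachineArn=sm["stateMachineArn"], input=json.dumps({"x": 1})
    )
    # describe_execution now returns the stored input instead of a hard-coded "{}"
    described = client.describe_execution(executionArn=execution["executionArn"])
    assert json.loads(described["input"]) == {"x": 1}


demo_execution_input()
```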
@@ -95,8 +95,11 @@ class StepFunctionResponse(BaseResponse):
     def start_execution(self):
         arn = self._get_param("stateMachineArn")
         name = self._get_param("name")
+        execution_input = self._get_param("input", if_none="{}")
         try:
-            execution = self.stepfunction_backend.start_execution(arn, name)
+            execution = self.stepfunction_backend.start_execution(
+                arn, name, execution_input
+            )
         except AWSError as err:
             return err.response()
         response = {
@@ -129,7 +132,7 @@ class StepFunctionResponse(BaseResponse):
         execution = self.stepfunction_backend.describe_execution(arn)
         response = {
             "executionArn": arn,
-            "input": "{}",
+            "input": execution.execution_input,
             "name": execution.name,
             "startDate": execution.start_date,
             "stateMachineArn": execution.state_machine_arn,
new file: moto/utilities/utils.py (10 lines)
@@ -0,0 +1,10 @@
+import random
+import string
+
+
+def random_string(length=None):
+    n = length or 20
+    random_str = "".join(
+        [random.choice(string.ascii_letters + string.digits) for i in range(n)]
+    )
+    return random_str
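A short usage sketch of the new helper:

```python
from moto.utilities.utils import random_string

print(random_string())   # 20 alphanumeric characters by default
print(random_string(8))  # or a caller-specified length
```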
@@ -1,6 +1,6 @@
 -r requirements.txt
 nose
-black; python_version >= '3.6'
+black==19.10b0; python_version >= '3.6'
+regex==2019.11.1; python_version >= '3.6'  # Needed for black
 sure==1.4.11
 coverage==4.5.4
@@ -12,7 +12,7 @@ boto3>=1.4.4
 botocore>=1.15.13
 six>=1.9
 parameterized>=0.7.0
-prompt-toolkit==1.0.14
+prompt-toolkit==2.0.10  # 3.x is not available with python2
 click==6.7
 inflection==0.3.1
 lxml==4.2.3
@@ -1,2 +1,2 @@
 # Please add requirements to setup.py
--e .
+-e .[all]
@@ -114,12 +114,12 @@ def append_mock_to_init_py(service):
     with open(path) as f:
         lines = [_.replace('\n', '') for _ in f.readlines()]

-    if any(_ for _ in lines if re.match('^from.*mock_{}.*$'.format(service), _)):
+    if any(_ for _ in lines if re.match('^mock_{}.*lazy_load(.*)$'.format(service), _)):
         return
-    filtered_lines = [_ for _ in lines if re.match('^from.*mock.*$', _)]
+    filtered_lines = [_ for _ in lines if re.match('^mock_.*lazy_load(.*)$', _)]
     last_import_line_index = lines.index(filtered_lines[-1])

-    new_line = 'from .{} import mock_{} # noqa'.format(get_escaped_service(service), get_escaped_service(service))
+    new_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(get_escaped_service(service), get_escaped_service(service), get_escaped_service(service))
     lines.insert(last_import_line_index + 1, new_line)

     body = '\n'.join(lines) + '\n'
@@ -127,23 +127,6 @@ def append_mock_to_init_py(service):
         f.write(body)


-def append_mock_import_to_backends_py(service):
-    path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py')
-    with open(path) as f:
-        lines = [_.replace('\n', '') for _ in f.readlines()]
-
-    if any(_ for _ in lines if re.match('^from moto\.{}.*{}_backends.*$'.format(service, service), _)):
-        return
-    filtered_lines = [_ for _ in lines if re.match('^from.*backends.*$', _)]
-    last_import_line_index = lines.index(filtered_lines[-1])
-
-    new_line = 'from moto.{} import {}_backends'.format(get_escaped_service(service), get_escaped_service(service))
-    lines.insert(last_import_line_index + 1, new_line)
-
-    body = '\n'.join(lines) + '\n'
-    with open(path, 'w') as f:
-        f.write(body)
-
 def append_mock_dict_to_backends_py(service):
     path = os.path.join(os.path.dirname(__file__), '..', 'moto', 'backends.py')
     with open(path) as f:
@@ -154,7 +137,7 @@ def append_mock_dict_to_backends_py(service):
     filtered_lines = [_ for _ in lines if re.match(".*\".*\":.*_backends.*", _)]
     last_elem_line_index = lines.index(filtered_lines[-1])

-    new_line = "    \"{}\": {}_backends,".format(service, get_escaped_service(service))
+    new_line = "    \"{}\": (\"{}\", \"{}_backends\"),".format(service, get_escaped_service(service), get_escaped_service(service))
     prev_line = lines[last_elem_line_index]
     if not prev_line.endswith('{') and not prev_line.endswith(','):
         lines[last_elem_line_index] += ','
@@ -212,7 +195,6 @@ def initialize_service(service, operation, api_protocol):

     # append mock to init files
     append_mock_to_init_py(service)
-    append_mock_import_to_backends_py(service)
     append_mock_dict_to_backends_py(service)


@@ -229,6 +211,9 @@ def to_snake_case(s):
     s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
     return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

+def get_operation_name_in_keys(operation_name, operation_keys):
+    index = [_.lower() for _ in operation_keys].index(operation_name.lower())
+    return operation_keys[index]

 def get_function_in_responses(service, operation, protocol):
     """refers to definition of API in botocore, and autogenerates function
@@ -237,7 +222,11 @@ def get_function_in_responses(service, operation, protocol):
     """
     client = boto3.client(service)

-    aws_operation_name = to_upper_camel_case(operation)
+    aws_operation_name = get_operation_name_in_keys(
+        to_upper_camel_case(operation),
+        list(client._service_model._service_description['operations'].keys())
+    )
+
     op_model = client._service_model.operation_model(aws_operation_name)
     if not hasattr(op_model.output_shape, 'members'):
         outputs = {}
@@ -282,7 +271,10 @@ def get_function_in_models(service, operation):
     https://github.com/boto/botocore/blob/develop/botocore/data/elbv2/2015-12-01/service-2.json
     """
     client = boto3.client(service)
-    aws_operation_name = to_upper_camel_case(operation)
+    aws_operation_name = get_operation_name_in_keys(
+        to_upper_camel_case(operation),
+        list(client._service_model._service_description['operations'].keys())
+    )
     op_model = client._service_model.operation_model(aws_operation_name)
     inputs = op_model.input_shape.members
     if not hasattr(op_model.output_shape, 'members'):
@@ -329,7 +321,11 @@ def get_response_query_template(service, operation):
     https://github.com/boto/botocore/blob/develop/botocore/data/elbv2/2015-12-01/service-2.json
     """
     client = boto3.client(service)
-    aws_operation_name = to_upper_camel_case(operation)
+    aws_operation_name = get_operation_name_in_keys(
+        to_upper_camel_case(operation),
+        list(client._service_model._service_description['operations'].keys())
+    )
+
     op_model = client._service_model.operation_model(aws_operation_name)
     result_wrapper = op_model.output_shape.serialization['resultWrapper']
     response_wrapper = result_wrapper.replace('Result', 'Response')
@@ -403,11 +399,13 @@ def insert_code_to_class(path, base_class, new_code):
     with open(path, 'w') as f:
         f.write(body)


 def insert_url(service, operation, api_protocol):
     client = boto3.client(service)
     service_class = client.__class__.__name__
-    aws_operation_name = to_upper_camel_case(operation)
+    aws_operation_name = get_operation_name_in_keys(
+        to_upper_camel_case(operation),
+        list(client._service_model._service_description['operations'].keys())
+    )
     uri = client._service_model.operation_model(aws_operation_name).http['requestUri']

     path = os.path.join(os.path.dirname(__file__), '..', 'moto', get_escaped_service(service), 'urls.py')
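For context, this is the shape of the lines the scaffold now generates; `lazy_load` defers the service import until the mock is first used, and the backends registry stores a `(module, attribute)` tuple instead of an already-imported object. The service name `codedeploy` is just an example (for a name needing no escaping, `get_escaped_service` returns it unchanged):

```python
service = "codedeploy"  # example service name

# Line appended to moto/__init__.py by append_mock_to_init_py:
init_line = 'mock_{} = lazy_load(".{}", "mock_{}")'.format(service, service, service)
assert init_line == 'mock_codedeploy = lazy_load(".codedeploy", "mock_codedeploy")'

# Entry appended to the dict in moto/backends.py by append_mock_dict_to_backends_py:
dict_line = '    "{}": ("{}", "{}_backends"),'.format(service, service, service)
assert dict_line == '    "codedeploy": ("codedeploy", "codedeploy_backends"),'
```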
setup.py
@@ -33,21 +33,13 @@ install_requires = [
     "boto>=2.36.0",
     "boto3>=1.9.201",
     "botocore>=1.12.201",
-    "cryptography>=2.3.0",
     "requests>=2.5",
     "xmltodict",
     "six>1.9",
     "werkzeug",
-    "PyYAML>=5.1",
     "pytz",
     "python-dateutil<3.0.0,>=2.1",
-    "python-jose[cryptography]>=3.1.0,<4.0.0",
-    "docker>=2.5.1",
-    "jsondiff>=1.1.2",
-    "aws-xray-sdk!=0.96,>=0.93",
     "responses>=0.9.0",
-    "idna<3,>=2.5",
-    "cfn-lint>=0.4.0",
     "MarkupSafe<2.0",  # This is a Jinja2 dependency, 2.0.0a1 currently seems broken
 ]

@@ -71,7 +63,6 @@ if PY2:
         "mock<=3.0.5",
         "more-itertools==5.0.0",
         "setuptools==44.0.0",
-        "sshpubkeys>=3.1.0,<4.0",
         "zipp==0.6.0",
     ]
 else:
@@ -80,14 +71,57 @@ else:
         "mock",
         "more-itertools",
         "setuptools",
-        "sshpubkeys>=3.1.0",
         "zipp",
     ]

+_dep_cryptography = "cryptography>=2.3.0"
+_dep_PyYAML = "PyYAML>=5.1"
+_dep_python_jose = "python-jose[cryptography]>=3.1.0,<4.0.0"
+_dep_python_jose_ecdsa_pin = "ecdsa<0.15"  # https://github.com/spulec/moto/pull/3263#discussion_r477404984
+_dep_docker = "docker>=2.5.1"
+_dep_jsondiff = "jsondiff>=1.1.2"
+_dep_aws_xray_sdk = "aws-xray-sdk!=0.96,>=0.93"
+_dep_idna = "idna<3,>=2.5"
+_dep_cfn_lint = "cfn-lint>=0.4.0"
+_dep_sshpubkeys_py2 = "sshpubkeys>=3.1.0,<4.0; python_version<'3'"
+_dep_sshpubkeys_py3 = "sshpubkeys>=3.1.0; python_version>'3'"
+
+all_extra_deps = [
+    _dep_cryptography,
+    _dep_PyYAML,
+    _dep_python_jose,
+    _dep_python_jose_ecdsa_pin,
+    _dep_docker,
+    _dep_jsondiff,
+    _dep_aws_xray_sdk,
+    _dep_idna,
+    _dep_cfn_lint,
+    _dep_sshpubkeys_py2,
+    _dep_sshpubkeys_py3,
+]
+
+# TODO: do we want to add ALL services here?
+# i.e. even those without extra dependencies.
+# Would be good for future-compatibility, I guess.
+extras_per_service = {
+    "ec2": [_dep_cryptography, _dep_sshpubkeys_py2, _dep_sshpubkeys_py3],
+    'acm': [_dep_cryptography],
+    'iam': [_dep_cryptography],
+    'cloudformation': [_dep_PyYAML, _dep_cfn_lint],
+    'cognitoidp': [_dep_python_jose, _dep_python_jose_ecdsa_pin],
+    'awslambda': [_dep_docker],
+    'batch': [_dep_docker],
+    'iotdata': [_dep_jsondiff],
+    'xray': [_dep_aws_xray_sdk],
+}
+
+extras_require = {
+    'all': all_extra_deps,
+    'server': ['flask'],
+}
+
+extras_require.update(extras_per_service)
+
 # https://hynek.me/articles/conditional-python-dependencies/
 if int(setuptools.__version__.split(".", 1)[0]) < 18:
     if sys.version_info[0:2] < (3, 3):
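With the heavy service dependencies moved into extras, a plain `pip install moto` becomes much lighter, and the previous all-inclusive behaviour is opt-in. The extras names below come straight from the `extras_require` dict above:

```bash
pip install moto            # core install, no service-specific dependencies
pip install moto[ec2]       # adds cryptography and sshpubkeys for EC2
pip install moto[all]       # everything, matching the old default
pip install moto[server]    # adds flask for the standalone server
```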
@@ -1858,6 +1858,23 @@ def test_create_api_key():
     client.create_api_key.when.called_with(**payload).should.throw(ClientError)


+@mock_apigateway
+def test_create_api_headers():
+    region_name = "us-west-2"
+    client = boto3.client("apigateway", region_name=region_name)
+
+    apikey_value = "12345"
+    apikey_name = "TESTKEY1"
+    payload = {"value": apikey_value, "name": apikey_name}
+
+    client.create_api_key(**payload)
+    with assert_raises(ClientError) as ex:
+        client.create_api_key(**payload)
+    ex.exception.response["Error"]["Code"].should.equal("ConflictException")
+    if not settings.TEST_SERVER_MODE:
+        ex.exception.response["ResponseMetadata"]["HTTPHeaders"].should.equal({})
+
+
 @mock_apigateway
 def test_api_keys():
     region_name = "us-west-2"
@@ -592,7 +592,7 @@ def test_boto3_create_stack_set_with_yaml():
 @mock_cloudformation
 @mock_s3
 def test_create_stack_set_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")

@@ -704,7 +704,7 @@ def test_boto3_create_stack_with_short_form_func_yaml():
 @mock_s3
 @mock_cloudformation
 def test_get_template_summary():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")

     conn = boto3.client("cloudformation", region_name="us-east-1")
@@ -802,7 +802,7 @@ def test_create_stack_with_role_arn():
 @mock_cloudformation
 @mock_s3
 def test_create_stack_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")

@@ -857,7 +857,7 @@ def test_update_stack_with_previous_value():
 @mock_s3
 @mock_ec2
 def test_update_stack_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")

     cf_conn = boto3.client("cloudformation", region_name="us-east-1")
@@ -886,7 +886,7 @@ def test_update_stack_from_s3_url():
 @mock_cloudformation
 @mock_s3
 def test_create_change_set_from_s3_url():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")

@@ -118,7 +118,7 @@ def test_boto3_yaml_validate_successful():
 @mock_cloudformation
 @mock_s3
 def test_boto3_yaml_validate_template_url_successful():
-    s3 = boto3.client("s3")
+    s3 = boto3.client("s3", region_name="us-east-1")
     s3_conn = boto3.resource("s3", region_name="us-east-1")
     s3_conn.create_bucket(Bucket="foobar")

@@ -4,6 +4,9 @@ import json
 import os
 import random
 import re
+import hmac
+import hashlib
+import base64

 import requests
 import uuid
@@ -1248,6 +1251,137 @@ def test_authentication_flow():
     authentication_flow(conn)


+def user_authentication_flow(conn):
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    user_attribute_name = str(uuid.uuid4())
+    user_attribute_value = str(uuid.uuid4())
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id,
+        ClientName=str(uuid.uuid4()),
+        ReadAttributes=[user_attribute_name],
+        GenerateSecret=True,
+    )["UserPoolClient"]["ClientId"]
+
+    conn.sign_up(
+        ClientId=client_id, Username=username, Password=password,
+    )
+
+    client_secret = conn.describe_user_pool_client(
+        UserPoolId=user_pool_id, ClientId=client_id,
+    )["UserPoolClient"]["ClientSecret"]
+
+    conn.confirm_sign_up(
+        ClientId=client_id, Username=username, ConfirmationCode="123456",
+    )
+
+    # generating secret hash
+    key = bytes(str(client_secret).encode("latin-1"))
+    msg = bytes(str(username + client_id).encode("latin-1"))
+    new_digest = hmac.new(key, msg, hashlib.sha256).digest()
+    secret_hash = base64.b64encode(new_digest).decode()
+
+    result = conn.initiate_auth(
+        ClientId=client_id,
+        AuthFlow="USER_SRP_AUTH",
+        AuthParameters={
+            "USERNAME": username,
+            "SRP_A": str(uuid.uuid4()),
+            "SECRET_HASH": secret_hash,
+        },
+    )
+
+    result = conn.respond_to_auth_challenge(
+        ClientId=client_id,
+        ChallengeName=result["ChallengeName"],
+        ChallengeResponses={
+            "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()),
+            "PASSWORD_CLAIM_SECRET_BLOCK": result["Session"],
+            "TIMESTAMP": str(uuid.uuid4()),
+            "USERNAME": username,
+        },
+    )
+
+    refresh_token = result["AuthenticationResult"]["RefreshToken"]
+
+    # add mfa token
+    conn.associate_software_token(
+        AccessToken=result["AuthenticationResult"]["AccessToken"],
+    )
+
+    conn.verify_software_token(
+        AccessToken=result["AuthenticationResult"]["AccessToken"], UserCode="123456",
+    )
+
+    conn.set_user_mfa_preference(
+        AccessToken=result["AuthenticationResult"]["AccessToken"],
+        SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True,},
+    )
+
+    result = conn.initiate_auth(
+        ClientId=client_id,
+        AuthFlow="REFRESH_TOKEN",
+        AuthParameters={"SECRET_HASH": secret_hash, "REFRESH_TOKEN": refresh_token,},
+    )
+
+    result["AuthenticationResult"]["IdToken"].should_not.be.none
+    result["AuthenticationResult"]["AccessToken"].should_not.be.none
+
+    # authenticate user once again this time with mfa token
+    result = conn.initiate_auth(
+        ClientId=client_id,
+        AuthFlow="USER_SRP_AUTH",
+        AuthParameters={
+            "USERNAME": username,
+            "SRP_A": str(uuid.uuid4()),
+            "SECRET_HASH": secret_hash,
+        },
+    )
+
+    result = conn.respond_to_auth_challenge(
+        ClientId=client_id,
+        ChallengeName=result["ChallengeName"],
+        ChallengeResponses={
+            "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()),
+            "PASSWORD_CLAIM_SECRET_BLOCK": result["Session"],
+            "TIMESTAMP": str(uuid.uuid4()),
+            "USERNAME": username,
+        },
+    )
+
+    result = conn.respond_to_auth_challenge(
+        ClientId=client_id,
+        Session=result["Session"],
+        ChallengeName=result["ChallengeName"],
+        ChallengeResponses={
+            "SOFTWARE_TOKEN_MFA_CODE": "123456",
+            "USERNAME": username,
+            "SECRET_HASH": secret_hash,
+        },
+    )
+
+    return {
+        "user_pool_id": user_pool_id,
+        "client_id": client_id,
+        "client_secret": client_secret,
+        "secret_hash": secret_hash,
+        "id_token": result["AuthenticationResult"]["IdToken"],
+        "access_token": result["AuthenticationResult"]["AccessToken"],
+        "refresh_token": refresh_token,
+        "username": username,
+        "password": password,
+        "additional_fields": {user_attribute_name: user_attribute_value},
+    }
+
+
+@mock_cognitoidp
+def test_user_authentication_flow():
+    conn = boto3.client("cognito-idp", "us-west-2")
+
+    user_authentication_flow(conn)
+
+
 @mock_cognitoidp
 def test_token_legitimacy():
     conn = boto3.client("cognito-idp", "us-west-2")
@@ -1437,6 +1571,244 @@ def test_resource_server():
     ex.exception.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)


+@mock_cognitoidp
+def test_sign_up():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()),
+    )["UserPoolClient"]["ClientId"]
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    result = conn.sign_up(ClientId=client_id, Username=username, Password=password)
+    result["UserConfirmed"].should.be.false
+    result["UserSub"].should_not.be.none
+
+
+@mock_cognitoidp
+def test_confirm_sign_up():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
+    )["UserPoolClient"]["ClientId"]
+    conn.sign_up(ClientId=client_id, Username=username, Password=password)
+
+    conn.confirm_sign_up(
+        ClientId=client_id, Username=username, ConfirmationCode="123456",
+    )
+
+    result = conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
+    result["UserStatus"].should.equal("CONFIRMED")
+
+
+@mock_cognitoidp
+def test_initiate_auth_USER_SRP_AUTH():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
+    )["UserPoolClient"]["ClientId"]
+    conn.sign_up(ClientId=client_id, Username=username, Password=password)
+    client_secret = conn.describe_user_pool_client(
+        UserPoolId=user_pool_id, ClientId=client_id,
+    )["UserPoolClient"]["ClientSecret"]
+    conn.confirm_sign_up(
+        ClientId=client_id, Username=username, ConfirmationCode="123456",
+    )
+
+    key = bytes(str(client_secret).encode("latin-1"))
+    msg = bytes(str(username + client_id).encode("latin-1"))
+    new_digest = hmac.new(key, msg, hashlib.sha256).digest()
+    secret_hash = base64.b64encode(new_digest).decode()
+
+    result = conn.initiate_auth(
+        ClientId=client_id,
+        AuthFlow="USER_SRP_AUTH",
+        AuthParameters={
+            "USERNAME": username,
+            "SRP_A": str(uuid.uuid4()),
+            "SECRET_HASH": secret_hash,
+        },
+    )
+
+    result["ChallengeName"].should.equal("PASSWORD_VERIFIER")
+
+
+@mock_cognitoidp
+def test_initiate_auth_REFRESH_TOKEN():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    result = user_authentication_flow(conn)
+    result = conn.initiate_auth(
+        ClientId=result["client_id"],
+        AuthFlow="REFRESH_TOKEN",
+        AuthParameters={
+            "REFRESH_TOKEN": result["refresh_token"],
+            "SECRET_HASH": result["secret_hash"],
+        },
+    )
+
+    result["AuthenticationResult"]["AccessToken"].should_not.be.none
+
+
+@mock_cognitoidp
+def test_initiate_auth_for_unconfirmed_user():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
+    )["UserPoolClient"]["ClientId"]
+    conn.sign_up(ClientId=client_id, Username=username, Password=password)
+    client_secret = conn.describe_user_pool_client(
+        UserPoolId=user_pool_id, ClientId=client_id,
+    )["UserPoolClient"]["ClientSecret"]
+
+    key = bytes(str(client_secret).encode("latin-1"))
+    msg = bytes(str(username + client_id).encode("latin-1"))
+    new_digest = hmac.new(key, msg, hashlib.sha256).digest()
+    secret_hash = base64.b64encode(new_digest).decode()
+
+    caught = False
+    try:
+        result = conn.initiate_auth(
+            ClientId=client_id,
+            AuthFlow="USER_SRP_AUTH",
+            AuthParameters={
+                "USERNAME": username,
+                "SRP_A": str(uuid.uuid4()),
+                "SECRET_HASH": secret_hash,
+            },
+        )
+    except conn.exceptions.UserNotConfirmedException:
+        caught = True
+
+    caught.should.be.true
+
+
+@mock_cognitoidp
+def test_initiate_auth_with_invalid_secret_hash():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    username = str(uuid.uuid4())
+    password = str(uuid.uuid4())
+    user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+    client_id = conn.create_user_pool_client(
+        UserPoolId=user_pool_id, ClientName=str(uuid.uuid4()), GenerateSecret=True,
+    )["UserPoolClient"]["ClientId"]
+    conn.sign_up(ClientId=client_id, Username=username, Password=password)
+    client_secret = conn.describe_user_pool_client(
+        UserPoolId=user_pool_id, ClientId=client_id,
+    )["UserPoolClient"]["ClientSecret"]
+    conn.confirm_sign_up(
+        ClientId=client_id, Username=username, ConfirmationCode="123456",
+    )
+
+    invalid_secret_hash = str(uuid.uuid4())
+
+    caught = False
+    try:
+        result = conn.initiate_auth(
+            ClientId=client_id,
+            AuthFlow="USER_SRP_AUTH",
+            AuthParameters={
+                "USERNAME": username,
+                "SRP_A": str(uuid.uuid4()),
+                "SECRET_HASH": invalid_secret_hash,
+            },
+        )
+    except conn.exceptions.NotAuthorizedException:
+        caught = True
+
+    caught.should.be.true
+
+
+@mock_cognitoidp
+def test_setting_mfa():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    result = authentication_flow(conn)
+    conn.associate_software_token(AccessToken=result["access_token"])
+    conn.verify_software_token(AccessToken=result["access_token"], UserCode="123456")
+    conn.set_user_mfa_preference(
+        AccessToken=result["access_token"],
+        SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True},
+    )
+    result = conn.admin_get_user(
+        UserPoolId=result["user_pool_id"], Username=result["username"]
+    )
+
+    result["UserMFASettingList"].should.have.length_of(1)
+
+
+@mock_cognitoidp
+def test_setting_mfa_when_token_not_verified():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    result = authentication_flow(conn)
+    conn.associate_software_token(AccessToken=result["access_token"])
+
+    caught = False
+    try:
+        conn.set_user_mfa_preference(
+            AccessToken=result["access_token"],
+            SoftwareTokenMfaSettings={"Enabled": True, "PreferredMfa": True},
+        )
+    except conn.exceptions.InvalidParameterException:
+        caught = True
+
+    caught.should.be.true
+
+
+@mock_cognitoidp
+def test_respond_to_auth_challenge_with_invalid_secret_hash():
+    conn = boto3.client("cognito-idp", "us-west-2")
+    result = user_authentication_flow(conn)
+
+    valid_secret_hash = result["secret_hash"]
+    invalid_secret_hash = str(uuid.uuid4())
+
+    challenge = conn.initiate_auth(
+        ClientId=result["client_id"],
+        AuthFlow="USER_SRP_AUTH",
+        AuthParameters={
+            "USERNAME": result["username"],
+            "SRP_A": str(uuid.uuid4()),
+            "SECRET_HASH": valid_secret_hash,
+        },
+    )
+
+    challenge = conn.respond_to_auth_challenge(
+        ClientId=result["client_id"],
+        ChallengeName=challenge["ChallengeName"],
+        ChallengeResponses={
+            "PASSWORD_CLAIM_SIGNATURE": str(uuid.uuid4()),
+            "PASSWORD_CLAIM_SECRET_BLOCK": challenge["Session"],
+            "TIMESTAMP": str(uuid.uuid4()),
+            "USERNAME": result["username"],
+        },
+    )
+
+    caught = False
+    try:
+        conn.respond_to_auth_challenge(
+            ClientId=result["client_id"],
+            Session=challenge["Session"],
+            ChallengeName=challenge["ChallengeName"],
+            ChallengeResponses={
+                "SOFTWARE_TOKEN_MFA_CODE": "123456",
+                "USERNAME": result["username"],
+                "SECRET_HASH": invalid_secret_hash,
+            },
+        )
+    except conn.exceptions.NotAuthorizedException:
+        caught = True
+
+    caught.should.be.true
+
+
 # Test will retrieve public key from cognito.amazonaws.com/.well-known/jwks.json,
 # which isnt mocked in ServerMode
 if not settings.TEST_SERVER_MODE:
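The SECRET_HASH built inline in several of these tests follows Cognito's documented recipe: Base64(HMAC-SHA256(key=client_secret, msg=username + client_id)). A standalone equivalent of that inline code (utf-8 encoding is used here; the tests use latin-1, which agrees for ASCII identifiers):

```python
import base64
import hashlib
import hmac


def cognito_secret_hash(username, client_id, client_secret):
    """Base64-encoded HMAC-SHA256 of username+client_id, keyed by the client secret."""
    digest = hmac.new(
        client_secret.encode("utf-8"),
        (username + client_id).encode("utf-8"),
        hashlib.sha256,
    ).digest()
    return base64.b64encode(digest).decode()
```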
@@ -23,6 +23,11 @@ from moto import mock_ec2_deprecated, mock_ec2, mock_cloudformation
 from tests.helpers import requires_boto_gte


+if six.PY2:
+    decode_method = base64.decodestring
+else:
+    decode_method = base64.decodebytes
+
 ################ Test Readme ###############
 def add_servers(ami_id, count):
     conn = boto.connect_ec2()
@@ -908,7 +913,7 @@ def test_user_data_with_run_instance():
     instance_attribute = instance.get_attribute("userData")
     instance_attribute.should.be.a(InstanceAttribute)
     retrieved_user_data = instance_attribute.get("userData").encode("utf-8")
-    decoded_user_data = base64.decodestring(retrieved_user_data)
+    decoded_user_data = decode_method(retrieved_user_data)
     decoded_user_data.should.equal(b"some user data")


@@ -75,6 +75,18 @@ def test_subnet_should_have_proper_availability_zone_set():
     subnetA.availability_zone.should.equal("us-west-1b")


+@mock_ec2
+def test_availability_zone_in_create_subnet():
+    ec2 = boto3.resource("ec2", region_name="us-west-1")
+
+    vpc = ec2.create_vpc(CidrBlock="172.31.0.0/16")
+
+    subnet = ec2.create_subnet(
+        VpcId=vpc.id, CidrBlock="172.31.48.0/20", AvailabilityZoneId="use1-az6"
+    )
+    subnet.availability_zone_id.should.equal("use1-az6")
+
+
 @mock_ec2
 def test_default_subnet():
     ec2 = boto3.resource("ec2", region_name="us-west-1")
@@ -612,7 +624,15 @@ def test_run_instances_should_attach_to_default_subnet():
     # Assert subnet is created appropriately
     subnets = client.describe_subnets()["Subnets"]
     default_subnet_id = subnets[0]["SubnetId"]
-    instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"].should.equal(
-        default_subnet_id
-    )
-    subnets[0]["AvailableIpAddressCount"].should.equal(4090)
+    if len(subnets) > 1:
+        default_subnet_id1 = subnets[1]["SubnetId"]
+    assert (
+        instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"]
+        == default_subnet_id
+        or instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"]
+        == default_subnet_id1
+    )
+    assert (
+        subnets[0]["AvailableIpAddressCount"] == 4090
+        or subnets[1]["AvailableIpAddressCount"] == 4090
+    )
@@ -254,6 +254,7 @@ def test_describe_task_definition():
                 "logConfiguration": {"logDriver": "json-file"},
             }
         ],
+        tags=[{"key": "Name", "value": "test_ecs_task"}],
     )
     _ = client.register_task_definition(
         family="test_ecs_task",
@@ -297,6 +298,11 @@ def test_describe_task_definition():
         "arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:2"
     )

+    response = client.describe_task_definition(
+        taskDefinition="test_ecs_task:1", include=["TAGS"]
+    )
+    response["tags"].should.equal([{"key": "Name", "value": "test_ecs_task"}])
+

 @mock_ecs
 def test_deregister_task_definition():
@@ -512,6 +518,7 @@ def test_describe_services():
         serviceName="test_ecs_service1",
         taskDefinition="test_ecs_task",
         desiredCount=2,
+        tags=[{"key": "Name", "value": "test_ecs_service1"}],
     )
     _ = client.create_service(
         cluster="test_ecs_cluster",
@@ -554,6 +561,18 @@ def test_describe_services():
         datetime.now()
         - response["services"][0]["deployments"][0]["updatedAt"].replace(tzinfo=None)
     ).seconds.should.be.within(0, 10)
+    response = client.describe_services(
+        cluster="test_ecs_cluster",
+        services=[
+            "test_ecs_service1",
+            "arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2",
+        ],
+        include=["TAGS"],
+    )
+    response["services"][0]["tags"].should.equal(
+        [{"key": "Name", "value": "test_ecs_service1"}]
+    )
+    response["services"][1]["tags"].should.equal([])


 @mock_ecs
@@ -5,12 +5,9 @@ import json
 import boto
 import boto3
 import csv
-import os
 import sure  # noqa
-import sys
 from boto.exception import BotoServerError
 from botocore.exceptions import ClientError
-from dateutil.tz import tzutc

 from moto import mock_iam, mock_iam_deprecated, settings
 from moto.core import ACCOUNT_ID
new file: tests/test_iam/test_iam_cloudformation.py (1196 lines)
File diff suppressed because it is too large
@@ -463,6 +463,46 @@ def test_list_things_with_attribute_and_thing_type_filter_and_next_token():
     )


+@mock_iot
+def test_endpoints():
+    region_name = "ap-northeast-1"
+    client = boto3.client("iot", region_name=region_name)
+
+    # iot:Data
+    endpoint = client.describe_endpoint(endpointType="iot:Data")
+    endpoint.should.have.key("endpointAddress").which.should_not.contain("ats")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:Data-ATS
+    endpoint = client.describe_endpoint(endpointType="iot:Data-ATS")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "ats.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:CredentialProvider
+    endpoint = client.describe_endpoint(endpointType="iot:CredentialProvider")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "credentials.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # iot:Jobs
+    endpoint = client.describe_endpoint(endpointType="iot:Jobs")
+    endpoint.should.have.key("endpointAddress").which.should.contain(
+        "jobs.iot.{}.amazonaws.com".format(region_name)
+    )
+
+    # raise InvalidRequestException
+    try:
+        client.describe_endpoint(endpointType="iot:Abc")
+    except client.exceptions.InvalidRequestException as exc:
+        error_code = exc.response["Error"]["Code"]
+        error_code.should.equal("InvalidRequestException")
+    else:
+        raise Exception("Should have raised error")
+
+
 @mock_iot
 def test_certs():
     client = boto3.client("iot", region_name="us-east-1")
@@ -523,6 +563,26 @@ def test_certs():
     res = client.list_certificates()
     res.should.have.key("certificates")

+    # Test register_certificate without CA flow
+    cert = client.register_certificate_without_ca(
+        certificatePem=cert_pem, status="INACTIVE"
+    )
+    cert.should.have.key("certificateId").which.should_not.be.none
+    cert.should.have.key("certificateArn").which.should_not.be.none
+    cert_id = cert["certificateId"]
+
+    res = client.list_certificates()
+    res.should.have.key("certificates").which.should.have.length_of(1)
+    for cert in res["certificates"]:
+        cert.should.have.key("certificateArn").which.should_not.be.none
+        cert.should.have.key("certificateId").which.should_not.be.none
+        cert.should.have.key("status").which.should_not.be.none
+        cert.should.have.key("creationDate").which.should_not.be.none
+
+    client.delete_certificate(certificateId=cert_id)
+    res = client.list_certificates()
+    res.should.have.key("certificates")
+

 @mock_iot
 def test_delete_policy_validation():
@@ -73,6 +73,12 @@ Resources:
     Properties:
       Name: MyStream
      ShardCount: 4
+      RetentionPeriodHours: 48
+      Tags:
+        - Key: TagKey1
+          Value: TagValue1
+        - Key: TagKey2
+          Value: TagValue2
 """.strip()

     cf_conn.create_stack(StackName=stack_name, TemplateBody=template)
@@ -83,6 +89,14 @@ Resources:
     stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[
         "StreamDescription"
     ]
+    stream_description["RetentionPeriodHours"].should.equal(48)
+
+    tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"]
+    tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"]
+    tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"]
+    tag1_value.should.equal("TagValue1")
+    tag2_value.should.equal("TagValue2")
+
     shards_provisioned = len(
         [
             shard
@@ -98,12 +112,27 @@ Resources:
     Type: AWS::Kinesis::Stream
     Properties:
       ShardCount: 6
+      RetentionPeriodHours: 24
+      Tags:
+        - Key: TagKey1
+          Value: TagValue1a
+        - Key: TagKey2
+          Value: TagValue2a
+
 """.strip()
     cf_conn.update_stack(StackName=stack_name, TemplateBody=template)

     stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[
         "StreamDescription"
     ]
+    stream_description["RetentionPeriodHours"].should.equal(24)
+
+    tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"]
+    tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"]
+    tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"]
+    tag1_value.should.equal("TagValue1a")
+    tag2_value.should.equal("TagValue2a")
+
     shards_provisioned = len(
         [
             shard
new file: tests/test_kinesisvideo/test_kinesisvideo.py (140 lines)
@@ -0,0 +1,140 @@
+from __future__ import unicode_literals
+
+import boto3
+import sure  # noqa
+from nose.tools import assert_raises
+from moto import mock_kinesisvideo
+from botocore.exceptions import ClientError
+import json
+
+
+@mock_kinesisvideo
+def test_create_stream():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    device_name = "random-device"
+
+    # stream can be created
+    res = client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    res.should.have.key("StreamARN").which.should.contain(stream_name)
+
+
+@mock_kinesisvideo
+def test_create_stream_with_same_name():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    device_name = "random-device"
+
+    client.create_stream(StreamName=stream_name, DeviceName=device_name)
+
+    # cannot create with same stream name
+    with assert_raises(ClientError):
+        client.create_stream(StreamName=stream_name, DeviceName=device_name)
+
+
+@mock_kinesisvideo
+def test_describe_stream():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    device_name = "random-device"
+
+    res = client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    res.should.have.key("StreamARN").which.should.contain(stream_name)
+    stream_arn = res["StreamARN"]
+
+    # cannot create with existing stream name
+    with assert_raises(ClientError):
+        client.create_stream(StreamName=stream_name, DeviceName=device_name)
+
+    # stream can be described with name
+    res = client.describe_stream(StreamName=stream_name)
+    res.should.have.key("StreamInfo")
+    stream_info = res["StreamInfo"]
+    stream_info.should.have.key("StreamARN").which.should.contain(stream_name)
+    stream_info.should.have.key("StreamName").which.should.equal(stream_name)
+    stream_info.should.have.key("DeviceName").which.should.equal(device_name)
+
+    # stream can be described with arn
+    res = client.describe_stream(StreamARN=stream_arn)
+    res.should.have.key("StreamInfo")
+    stream_info = res["StreamInfo"]
+    stream_info.should.have.key("StreamARN").which.should.contain(stream_name)
+    stream_info.should.have.key("StreamName").which.should.equal(stream_name)
+    stream_info.should.have.key("DeviceName").which.should.equal(device_name)
+
+
+@mock_kinesisvideo
+def test_describe_stream_with_name_not_exist():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name_not_exist = "not-exist-stream"
+
+    # cannot describe with not exist stream name
+    with assert_raises(ClientError):
+        client.describe_stream(StreamName=stream_name_not_exist)
+
+
+@mock_kinesisvideo
+def test_list_streams():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    stream_name_2 = "my-stream-2"
+    device_name = "random-device"
+
+    client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    client.create_stream(StreamName=stream_name_2, DeviceName=device_name)
+
+    # streams can be listed
+    res = client.list_streams()
+    res.should.have.key("StreamInfoList")
+    streams = res["StreamInfoList"]
+    streams.should.have.length_of(2)
+
+
+@mock_kinesisvideo
+def test_delete_stream():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    stream_name_2 = "my-stream-2"
+    device_name = "random-device"
+
+    client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    res = client.create_stream(StreamName=stream_name_2, DeviceName=device_name)
+    stream_2_arn = res["StreamARN"]
+
+    # stream can be deleted
+    client.delete_stream(StreamARN=stream_2_arn)
+    res = client.list_streams()
+    streams = res["StreamInfoList"]
+    streams.should.have.length_of(1)
+
+
+@mock_kinesisvideo
+def test_delete_stream_with_arn_not_exist():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    stream_name_2 = "my-stream-2"
+    device_name = "random-device"
+
+    client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    res = client.create_stream(StreamName=stream_name_2, DeviceName=device_name)
+    stream_2_arn = res["StreamARN"]
+
+    client.delete_stream(StreamARN=stream_2_arn)
+
+    # cannot delete with not exist stream
+    stream_arn_not_exist = stream_2_arn
+    with assert_raises(ClientError):
+        client.delete_stream(StreamARN=stream_arn_not_exist)
+
+
+@mock_kinesisvideo
+def test_data_endpoint():
+    client = boto3.client("kinesisvideo", region_name="ap-northeast-1")
+    stream_name = "my-stream"
+    device_name = "random-device"
+
+    # data-endpoint can be created
+    api_name = "GET_MEDIA"
+    client.create_stream(StreamName=stream_name, DeviceName=device_name)
+    res = client.get_data_endpoint(StreamName=stream_name, APIName=api_name)
+    res.should.have.key("DataEndpoint")
new file: tests/test_kinesisvideo/test_server.py (18 lines)
@@ -0,0 +1,18 @@
+from __future__ import unicode_literals
+
+import sure  # noqa
+
+import moto.server as server
+from moto import mock_kinesisvideo
+
+"""
+Test the different server responses
+"""
+
+
+@mock_kinesisvideo
+def test_kinesisvideo_server_is_up():
+    backend = server.create_backend_app("kinesisvideo")
+    test_client = backend.test_client()
+    res = test_client.post("/listStreams")
+    res.status_code.should.equal(200)
@@ -0,0 +1,86 @@
+from __future__ import unicode_literals
+
+import boto3
+import sure  # noqa
+from moto import mock_kinesisvideoarchivedmedia
+from moto import mock_kinesisvideo
+from datetime import datetime, timedelta
+
+
+@mock_kinesisvideo
+@mock_kinesisvideoarchivedmedia
+def test_get_hls_streaming_session_url():
+    region_name = "ap-northeast-1"
+    kvs_client = boto3.client("kinesisvideo", region_name=region_name)
+    stream_name = "my-stream"
+    kvs_client.create_stream(StreamName=stream_name)
+
+    api_name = "GET_HLS_STREAMING_SESSION_URL"
+    res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name)
+    data_endpoint = res["DataEndpoint"]
+
+    client = boto3.client(
+        "kinesis-video-archived-media",
+        region_name=region_name,
+        endpoint_url=data_endpoint,
+    )
+    res = client.get_hls_streaming_session_url(StreamName=stream_name,)
+    reg_exp = "^{}/hls/v1/getHLSMasterPlaylist.m3u8\?SessionToken\=.+$".format(
+        data_endpoint
+    )
+    res.should.have.key("HLSStreamingSessionURL").which.should.match(reg_exp)
+
+
+@mock_kinesisvideo
+@mock_kinesisvideoarchivedmedia
+def test_get_dash_streaming_session_url():
+    region_name = "ap-northeast-1"
+    kvs_client = boto3.client("kinesisvideo", region_name=region_name)
+    stream_name = "my-stream"
+    kvs_client.create_stream(StreamName=stream_name)
+
+    api_name = "GET_DASH_STREAMING_SESSION_URL"
+    res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name)
+    data_endpoint = res["DataEndpoint"]
+
+    client = boto3.client(
+        "kinesis-video-archived-media",
+        region_name=region_name,
+        endpoint_url=data_endpoint,
+    )
+    res = client.get_dash_streaming_session_url(StreamName=stream_name,)
+    reg_exp = "^{}/dash/v1/getDASHManifest.mpd\?SessionToken\=.+$".format(data_endpoint)
+    res.should.have.key("DASHStreamingSessionURL").which.should.match(reg_exp)
+
+
+@mock_kinesisvideo
+@mock_kinesisvideoarchivedmedia
+def test_get_clip():
+    region_name = "ap-northeast-1"
+    kvs_client = boto3.client("kinesisvideo", region_name=region_name)
+    stream_name = "my-stream"
+    kvs_client.create_stream(StreamName=stream_name)
+
+    api_name = "GET_DASH_STREAMING_SESSION_URL"
+    res = kvs_client.get_data_endpoint(StreamName=stream_name, APIName=api_name)
+    data_endpoint = res["DataEndpoint"]
+
+    client = boto3.client(
+        "kinesis-video-archived-media",
+        region_name=region_name,
+        endpoint_url=data_endpoint,
+    )
+    end_timestamp = datetime.utcnow() - timedelta(hours=1)
+    start_timestamp = end_timestamp - timedelta(minutes=5)
+    res = client.get_clip(
+        StreamName=stream_name,
+        ClipFragmentSelector={
+            "FragmentSelectorType": "PRODUCER_TIMESTAMP",
+            "TimestampRange": {
+                "StartTimestamp": start_timestamp,
+                "EndTimestamp": end_timestamp,
+            },
+        },
+    )
+    res.should.have.key("ContentType").which.should.match("video/mp4")
+    res.should.have.key("Payload")
new file: tests/test_kinesisvideoarchivedmedia/test_server.py (19 lines)
@@ -0,0 +1,19 @@
+from __future__ import unicode_literals
+
+import sure  # noqa
+
+import moto.server as server
+from moto import mock_kinesisvideoarchivedmedia
+
+"""
+Test the different server responses
+"""
+
+
+@mock_kinesisvideoarchivedmedia
+def test_kinesisvideoarchivedmedia_server_is_up():
+    backend = server.create_backend_app("kinesis-video-archived-media")
+    test_client = backend.test_client()
+    res = test_client.post("/getHLSStreamingSessionURL")
+    # Just checking server is up
+    res.status_code.should.equal(404)
@ -2,16 +2,14 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
|
||||
import os
|
||||
from boto3 import Session
|
||||
from six.moves.urllib.request import urlopen
|
||||
from six.moves.urllib.error import HTTPError
|
||||
from functools import wraps
|
||||
from gzip import GzipFile
|
||||
from io import BytesIO
|
||||
import mimetypes
|
||||
import zlib
|
||||
import pickle
|
||||
import uuid
|
||||
@ -36,7 +34,7 @@ from nose.tools import assert_raises
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto import settings, mock_s3, mock_s3_deprecated, mock_config, mock_cloudformation
|
||||
from moto import settings, mock_s3, mock_s3_deprecated, mock_config
|
||||
import moto.s3.models as s3model
|
||||
from moto.core.exceptions import InvalidNextTokenException
|
||||
from moto.core.utils import py2_strip_unicode_keys
|
||||
@ -1056,6 +1054,29 @@ def test_streaming_upload_from_file_to_presigned_url():
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_multipart_upload_from_file_to_presigned_url():
|
||||
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||
s3.create_bucket(Bucket="mybucket")
|
||||
|
||||
params = {"Bucket": "mybucket", "Key": "file_upload"}
|
||||
presigned_url = boto3.client("s3").generate_presigned_url(
|
||||
"put_object", params, ExpiresIn=900
|
||||
)
|
||||
|
||||
file = open("text.txt", "w")
|
||||
file.write("test")
|
||||
file.close()
|
||||
files = {"upload_file": open("text.txt", "rb")}
|
||||
|
||||
requests.put(presigned_url, files=files)
|
||||
resp = s3.get_object(Bucket="mybucket", Key="file_upload")
|
||||
data = resp["Body"].read()
|
||||
assert data == b"test"
|
||||
# cleanup
|
||||
os.remove("text.txt")
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_s3_object_in_private_bucket():
|
||||
s3 = boto3.resource("s3")
|
||||
@ -2779,6 +2800,39 @@ def test_put_bucket_acl_body():
|
||||
assert not result.get("Grants")
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_object_acl_with_presigned_post():
|
||||
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||
|
||||
bucket_name = "imageS3Bucket"
|
||||
object_name = "text.txt"
|
||||
fields = {"acl": "public-read"}
|
||||
file = open("text.txt", "w")
|
||||
file.write("test")
|
||||
file.close()
|
||||
|
||||
s3.create_bucket(Bucket=bucket_name)
|
||||
response = s3.generate_presigned_post(
|
||||
bucket_name, object_name, Fields=fields, ExpiresIn=60000
|
||||
)
|
||||
|
||||
with open(object_name, "rb") as f:
|
||||
files = {"file": (object_name, f)}
|
||||
requests.post(response["url"], data=response["fields"], files=files)
|
||||
|
||||
response = s3.get_object_acl(Bucket=bucket_name, Key=object_name)
|
||||
|
||||
assert "Grants" in response
|
||||
assert len(response["Grants"]) == 2
|
||||
assert response["Grants"][1]["Permission"] == "READ"
|
||||
|
||||
response = s3.get_object(Bucket=bucket_name, Key=object_name)
|
||||
|
||||
assert "ETag" in response
|
||||
assert "Body" in response
|
||||
os.remove("text.txt")
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_put_bucket_notification():
|
||||
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||

@ -4686,142 +4740,3 @@ def test_presigned_put_url_with_custom_headers():

    s3.delete_object(Bucket=bucket, Key=key)
    s3.delete_bucket(Bucket=bucket)


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_basic():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {}}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
        "StackId"
    ]
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]

    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_with_properties():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    bucket_name = "MyBucket"
    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {
                    "BucketName": bucket_name,
                    "BucketEncryption": {
                        "ServerSideEncryptionConfiguration": [
                            {
                                "ServerSideEncryptionByDefault": {
                                    "SSEAlgorithm": "AES256"
                                }
                            }
                        ]
                    },
                },
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
        "StackId"
    ]
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=bucket_name)

    encryption = s3.get_bucket_encryption(Bucket=bucket_name)
    encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
        "ApplyServerSideEncryptionByDefault"
    ]["SSEAlgorithm"].should.equal("AES256")


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_no_interruption():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.create_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {
                    "BucketEncryption": {
                        "ServerSideEncryptionConfiguration": [
                            {
                                "ServerSideEncryptionByDefault": {
                                    "SSEAlgorithm": "AES256"
                                }
                            }
                        ]
                    }
                },
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.update_stack(StackName="test_stack", TemplateBody=template_json)
    encryption = s3.get_bucket_encryption(
        Bucket=stack_description["Outputs"][0]["OutputValue"]
    )
    encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
        "ApplyServerSideEncryptionByDefault"
    ]["SSEAlgorithm"].should.equal("AES256")


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_replacement():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.create_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {"BucketName": "MyNewBucketName"},
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.update_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

tests/test_s3/test_s3_cloudformation.py (new file, 145 lines)
@ -0,0 +1,145 @@
import json
import boto3

import sure  # noqa

from moto import mock_s3, mock_cloudformation


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_basic():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket", "Properties": {}}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
        "StackId"
    ]
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]

    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
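

# Editorial note (not from the commit): for AWS::S3::Bucket, {"Ref": ...}
# resolves to the bucket name, so the stack output can be passed straight to
# head_bucket to confirm the bucket was created.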


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_with_properties():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    bucket_name = "MyBucket"
    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {
                    "BucketName": bucket_name,
                    "BucketEncryption": {
                        "ServerSideEncryptionConfiguration": [
                            {
                                "ServerSideEncryptionByDefault": {
                                    "SSEAlgorithm": "AES256"
                                }
                            }
                        ]
                    },
                },
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    stack_id = cf.create_stack(StackName="test_stack", TemplateBody=template_json)[
        "StackId"
    ]
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=bucket_name)

    encryption = s3.get_bucket_encryption(Bucket=bucket_name)
    encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
        "ApplyServerSideEncryptionByDefault"
    ]["SSEAlgorithm"].should.equal("AES256")
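

# Editorial note (not from the commit): the two update tests below mirror
# CloudFormation's update semantics for AWS::S3::Bucket. Changing only
# BucketEncryption updates the bucket in place (no interruption), while
# changing BucketName forces a replacement, so the stack output then points at
# a newly created bucket.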


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_no_interruption():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.create_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {
                    "BucketEncryption": {
                        "ServerSideEncryptionConfiguration": [
                            {
                                "ServerSideEncryptionByDefault": {
                                    "SSEAlgorithm": "AES256"
                                }
                            }
                        ]
                    }
                },
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.update_stack(StackName="test_stack", TemplateBody=template_json)
    encryption = s3.get_bucket_encryption(
        Bucket=stack_description["Outputs"][0]["OutputValue"]
    )
    encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
        "ApplyServerSideEncryptionByDefault"
    ]["SSEAlgorithm"].should.equal("AES256")


@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_replacement():
    s3 = boto3.client("s3", region_name="us-east-1")
    cf = boto3.client("cloudformation", region_name="us-east-1")

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {"testInstance": {"Type": "AWS::S3::Bucket"}},
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.create_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Resources": {
            "testInstance": {
                "Type": "AWS::S3::Bucket",
                "Properties": {"BucketName": "MyNewBucketName"},
            }
        },
        "Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
    }
    template_json = json.dumps(template)
    cf.update_stack(StackName="test_stack", TemplateBody=template_json)
    stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
    s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])

@ -45,6 +45,25 @@ sqs_template_with_tags = """
    }
}"""

TEST_POLICY = """
{
  "Version":"2012-10-17",
  "Statement":[
    {
      "Effect": "Allow",
      "Principal": { "AWS": "*" },
      "Action": "sqs:SendMessage",
      "Resource": "'$sqs_queue_arn'",
      "Condition":{
        "ArnEquals":{
          "aws:SourceArn":"'$sns_topic_arn'"
        }
      }
    }
  ]
}
"""
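
# Editorial note (not from the commit): the '$sqs_queue_arn' and
# '$sns_topic_arn' tokens are literal placeholder strings; the policy test
# below only checks that the document round-trips through set_queue_attributes
# and get_queue_attributes, so these ARNs are never resolved or validated.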


@mock_sqs
def test_create_fifo_queue_fail():

@ -198,7 +217,8 @@ def test_get_queue_url_errors():
    client = boto3.client("sqs", region_name="us-east-1")

    client.get_queue_url.when.called_with(QueueName="non-existing-queue").should.throw(
        ClientError, "The specified queue does not exist for this wsdl version."
        ClientError,
        "The specified queue non-existing-queue does not exist for this wsdl version.",
    )

@ -206,10 +226,13 @@ def test_get_queue_url_errors():
def test_get_nonexistent_queue():
    sqs = boto3.resource("sqs", region_name="us-east-1")
    with assert_raises(ClientError) as err:
        sqs.get_queue_by_name(QueueName="nonexisting-queue")
        sqs.get_queue_by_name(QueueName="non-existing-queue")
    ex = err.exception
    ex.operation_name.should.equal("GetQueueUrl")
    ex.response["Error"]["Code"].should.equal("AWS.SimpleQueueService.NonExistentQueue")
    ex.response["Error"]["Message"].should.equal(
        "The specified queue non-existing-queue does not exist for this wsdl version."
    )

    with assert_raises(ClientError) as err:
        sqs.Queue("http://whatever-incorrect-queue-address").load()

@ -1447,6 +1470,36 @@ def test_permissions():
    )


@mock_sqs
def test_get_queue_attributes_template_response_validation():
    client = boto3.client("sqs", region_name="us-east-1")

    resp = client.create_queue(
        QueueName="test-dlr-queue.fifo", Attributes={"FifoQueue": "true"}
    )
    queue_url = resp["QueueUrl"]

    attrs = client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["All"])
    assert attrs.get("Attributes").get("Policy") is None

    attributes = {"Policy": TEST_POLICY}

    client.set_queue_attributes(QueueUrl=queue_url, Attributes=attributes)
    attrs = client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["Policy"])
    assert attrs.get("Attributes").get("Policy") is not None

    assert (
        json.loads(attrs.get("Attributes").get("Policy")).get("Version") == "2012-10-17"
    )
    assert len(json.loads(attrs.get("Attributes").get("Policy")).get("Statement")) == 1
    assert (
        json.loads(attrs.get("Attributes").get("Policy"))
        .get("Statement")[0]
        .get("Action")
        == "sqs:SendMessage"
    )
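
# Editorial sketch (not from the commit): parsing the returned policy once
# would keep the assertions above terser:
#     policy = json.loads(attrs["Attributes"]["Policy"])
#     assert policy["Version"] == "2012-10-17"
#     assert len(policy["Statement"]) == 1
#     assert policy["Statement"][0]["Action"] == "sqs:SendMessage"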


@mock_sqs
def test_add_permission_errors():
    client = boto3.client("sqs", region_name="us-east-1")

@ -1,8 +1,8 @@
from __future__ import unicode_literals

import boto3
import json
import sure  # noqa
import datetime

from datetime import datetime
from botocore.exceptions import ClientError

@ -134,7 +134,7 @@ def test_state_machine_creation_fails_with_invalid_names():
    #
    for invalid_name in invalid_names:
        with assert_raises(ClientError) as exc:
        with assert_raises(ClientError):
            client.create_state_machine(
                name=invalid_name,
                definition=str(simple_definition),

@ -147,7 +147,7 @@ def test_state_machine_creation_requires_valid_role_arn():
    client = boto3.client("stepfunctions", region_name=region)
    name = "example_step_function"
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        client.create_state_machine(
            name=name,
            definition=str(simple_definition),

@ -242,7 +242,7 @@ def test_state_machine_creation_can_be_described():
def test_state_machine_throws_error_when_describing_unknown_machine():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        unknown_state_machine = (
            "arn:aws:states:"
            + region

@ -258,7 +258,7 @@ def test_state_machine_throws_error_when_describing_unknown_machine():
def test_state_machine_throws_error_when_describing_bad_arn():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        client.describe_state_machine(stateMachineArn="bad")


@ -267,7 +267,7 @@ def test_state_machine_throws_error_when_describing_bad_arn():
def test_state_machine_throws_error_when_describing_machine_in_different_account():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        unknown_state_machine = (
            "arn:aws:states:" + region + ":000000000000:stateMachine:unknown"
        )

@ -376,7 +376,7 @@ def test_state_machine_start_execution():
def test_state_machine_start_execution_bad_arn_raises_exception():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        client.start_execution(stateMachineArn="bad")


@ -404,6 +404,68 @@ def test_state_machine_start_execution_with_custom_name():
    execution["startDate"].should.be.a(datetime)


@mock_stepfunctions
@mock_sts
def test_state_machine_start_execution_fails_on_duplicate_execution_name():
    client = boto3.client("stepfunctions", region_name=region)
    #
    sm = client.create_state_machine(
        name="name", definition=str(simple_definition), roleArn=_get_default_role()
    )
    execution_one = client.start_execution(
        stateMachineArn=sm["stateMachineArn"], name="execution_name"
    )
    #
    with assert_raises(ClientError) as exc:
        _ = client.start_execution(
            stateMachineArn=sm["stateMachineArn"], name="execution_name"
        )
    exc.exception.response["Error"]["Message"].should.equal(
        "Execution Already Exists: '" + execution_one["executionArn"] + "'"
    )
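

# Editorial note (not from the commit): Step Functions requires execution names
# to be unique per state machine, so reusing "execution_name" is expected to
# raise an "Execution Already Exists" error carrying the first execution's ARN,
# which the test above asserts verbatim.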


@mock_stepfunctions
@mock_sts
def test_state_machine_start_execution_with_custom_input():
    client = boto3.client("stepfunctions", region_name=region)
    #
    sm = client.create_state_machine(
        name="name", definition=str(simple_definition), roleArn=_get_default_role()
    )
    execution_input = json.dumps({"input_key": "input_value"})
    execution = client.start_execution(
        stateMachineArn=sm["stateMachineArn"], input=execution_input
    )
    #
    execution["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    uuid_regex = "[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}"
    expected_exec_name = (
        "arn:aws:states:"
        + region
        + ":"
        + _get_account_id()
        + ":execution:name:"
        + uuid_regex
    )
    execution["executionArn"].should.match(expected_exec_name)
    execution["startDate"].should.be.a(datetime)


@mock_stepfunctions
@mock_sts
def test_state_machine_start_execution_with_invalid_input():
    client = boto3.client("stepfunctions", region_name=region)
    #
    sm = client.create_state_machine(
        name="name", definition=str(simple_definition), roleArn=_get_default_role()
    )
    with assert_raises(ClientError):
        _ = client.start_execution(stateMachineArn=sm["stateMachineArn"], input="")
    with assert_raises(ClientError):
        _ = client.start_execution(stateMachineArn=sm["stateMachineArn"], input="{")
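

# Editorial note (not from the commit): both the empty string and "{" are
# rejected because execution input must be a complete, parseable JSON document;
# json.dumps(...) in the tests above guarantees valid input.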


@mock_stepfunctions
@mock_sts
def test_state_machine_list_executions():

@ -443,7 +505,7 @@ def test_state_machine_list_executions_when_none_exist():

@mock_stepfunctions
@mock_sts
def test_state_machine_describe_execution():
def test_state_machine_describe_execution_with_no_input():
    client = boto3.client("stepfunctions", region_name=region)
    #
    sm = client.create_state_machine(

@ -462,12 +524,36 @@ def test_state_machine_describe_execution():
    description.shouldnt.have("stopDate")


@mock_stepfunctions
@mock_sts
def test_state_machine_describe_execution_with_custom_input():
    client = boto3.client("stepfunctions", region_name=region)
    #
    execution_input = json.dumps({"input_key": "input_val"})
    sm = client.create_state_machine(
        name="name", definition=str(simple_definition), roleArn=_get_default_role()
    )
    execution = client.start_execution(
        stateMachineArn=sm["stateMachineArn"], input=execution_input
    )
    description = client.describe_execution(executionArn=execution["executionArn"])
    #
    description["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    description["executionArn"].should.equal(execution["executionArn"])
    description["input"].should.equal(execution_input)
    description["name"].shouldnt.be.empty
    description["startDate"].should.equal(execution["startDate"])
    description["stateMachineArn"].should.equal(sm["stateMachineArn"])
    description["status"].should.equal("RUNNING")
    description.shouldnt.have("stopDate")


@mock_stepfunctions
@mock_sts
def test_execution_throws_error_when_describing_unknown_execution():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        unknown_execution = (
            "arn:aws:states:" + region + ":" + _get_account_id() + ":execution:unknown"
        )

@ -498,7 +584,7 @@ def test_state_machine_can_be_described_by_execution():
def test_state_machine_throws_error_when_describing_unknown_execution():
    client = boto3.client("stepfunctions", region_name=region)
    #
    with assert_raises(ClientError) as exc:
    with assert_raises(ClientError):
        unknown_execution = (
            "arn:aws:states:" + region + ":" + _get_account_id() + ":execution:unknown"
        )

@ -1,9 +1,8 @@
#!/usr/bin/env bash
set -e
pip install flask
# TravisCI on bionic dist uses old version of Docker Engine
# which is incompatible with newer docker-py
# See https://github.com/docker/docker-py/issues/2639
pip install "docker>=2.5.1,<=4.2.2"
pip install /moto/dist/moto*.gz
moto_server -H 0.0.0.0 -p 5000
pip install $(ls /moto/dist/moto*.gz)[server,all]
moto_server -H 0.0.0.0 -p 5000
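
# Editorial note (not from the commit): the $(ls ...) substitution expands the
# versioned tarball name first, so the [server,all] extras suffix attaches to a
# concrete path rather than to an unexpanded glob pattern.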