Merge remote-tracking branch 'upstream/master'

commit 88596518f5

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 1.3.3
current_version = 1.3.4

[bumpversion:file:setup.py]
.gitignore (vendored, 1 line changed)

@@ -14,4 +14,5 @@ build/
python_env
.ropeproject/
.pytest_cache/
venv/
CHANGELOG.md (14 lines changed)

@@ -1,6 +1,20 @@
Moto Changelog
===================

1.3.5
-----

* Pin down botocore issue as temporary fix for #1793.
* More features on secrets manager

1.3.4
------

* IAM get account authorization details
* adding account id to ManagedPolicy ARN
* APIGateway usage plans and usage plan keys
* ECR list images

1.3.3
------
@ -58,7 +58,6 @@
|
||||
- [ ] get_room
|
||||
- [ ] get_room_skill_parameter
|
||||
- [ ] get_skill_group
|
||||
- [ ] list_device_events
|
||||
- [ ] list_skills
|
||||
- [ ] list_tags
|
||||
- [ ] put_room_skill_parameter
|
||||
@ -82,7 +81,7 @@
|
||||
- [ ] update_room
|
||||
- [ ] update_skill_group
|
||||
|
||||
## apigateway - 17% implemented
|
||||
## apigateway - 24% implemented
|
||||
- [ ] create_api_key
|
||||
- [ ] create_authorizer
|
||||
- [ ] create_base_path_mapping
|
||||
@ -95,8 +94,8 @@
|
||||
- [X] create_resource
|
||||
- [X] create_rest_api
|
||||
- [X] create_stage
|
||||
- [ ] create_usage_plan
|
||||
- [ ] create_usage_plan_key
|
||||
- [X] create_usage_plan
|
||||
- [X] create_usage_plan_key
|
||||
- [ ] create_vpc_link
|
||||
- [ ] delete_api_key
|
||||
- [ ] delete_authorizer
|
||||
@ -116,8 +115,8 @@
|
||||
- [X] delete_resource
|
||||
- [X] delete_rest_api
|
||||
- [ ] delete_stage
|
||||
- [ ] delete_usage_plan
|
||||
- [ ] delete_usage_plan_key
|
||||
- [X] delete_usage_plan
|
||||
- [X] delete_usage_plan_key
|
||||
- [ ] delete_vpc_link
|
||||
- [ ] flush_stage_authorizers_cache
|
||||
- [ ] flush_stage_cache
|
||||
@ -162,10 +161,10 @@
|
||||
- [X] get_stages
|
||||
- [ ] get_tags
|
||||
- [ ] get_usage
|
||||
- [ ] get_usage_plan
|
||||
- [ ] get_usage_plan_key
|
||||
- [ ] get_usage_plan_keys
|
||||
- [ ] get_usage_plans
|
||||
- [X] get_usage_plan
|
||||
- [X] get_usage_plan_key
|
||||
- [X] get_usage_plan_keys
|
||||
- [X] get_usage_plans
|
||||
- [ ] get_vpc_link
|
||||
- [ ] get_vpc_links
|
||||
- [ ] import_api_keys
|
||||
@ -352,7 +351,6 @@
|
||||
- [ ] delete_scaling_plan
|
||||
- [ ] describe_scaling_plan_resources
|
||||
- [ ] describe_scaling_plans
|
||||
- [ ] update_scaling_plan
|
||||
|
||||
## batch - 93% implemented
|
||||
- [ ] cancel_job
|
||||
@ -767,8 +765,6 @@
|
||||
- [ ] create_pipeline
|
||||
- [ ] delete_custom_action_type
|
||||
- [ ] delete_pipeline
|
||||
- [ ] delete_webhook
|
||||
- [ ] deregister_webhook_with_third_party
|
||||
- [ ] disable_stage_transition
|
||||
- [ ] enable_stage_transition
|
||||
- [ ] get_job_details
|
||||
@ -779,7 +775,6 @@
|
||||
- [ ] list_action_types
|
||||
- [ ] list_pipeline_executions
|
||||
- [ ] list_pipelines
|
||||
- [ ] list_webhooks
|
||||
- [ ] poll_for_jobs
|
||||
- [ ] poll_for_third_party_jobs
|
||||
- [ ] put_action_revision
|
||||
@ -788,8 +783,6 @@
|
||||
- [ ] put_job_success_result
|
||||
- [ ] put_third_party_job_failure_result
|
||||
- [ ] put_third_party_job_success_result
|
||||
- [ ] put_webhook
|
||||
- [ ] register_webhook_with_third_party
|
||||
- [ ] retry_stage_execution
|
||||
- [ ] start_pipeline_execution
|
||||
- [ ] update_pipeline
|
||||
@ -814,17 +807,17 @@
|
||||
- [ ] update_team_member
|
||||
- [ ] update_user_profile
|
||||
|
||||
## cognito-identity - 22% implemented
|
||||
- [X] create_identity_pool
|
||||
## cognito-identity - 0% implemented
|
||||
- [ ] create_identity_pool
|
||||
- [ ] delete_identities
|
||||
- [ ] delete_identity_pool
|
||||
- [ ] describe_identity
|
||||
- [ ] describe_identity_pool
|
||||
- [X] get_credentials_for_identity
|
||||
- [X] get_id
|
||||
- [ ] get_credentials_for_identity
|
||||
- [ ] get_id
|
||||
- [ ] get_identity_pool_roles
|
||||
- [ ] get_open_id_token
|
||||
- [X] get_open_id_token_for_developer_identity
|
||||
- [ ] get_open_id_token_for_developer_identity
|
||||
- [ ] list_identities
|
||||
- [ ] list_identity_pools
|
||||
- [ ] lookup_developer_identity
|
||||
@ -834,20 +827,20 @@
|
||||
- [ ] unlink_identity
|
||||
- [ ] update_identity_pool
|
||||
|
||||
## cognito-idp - 25% implemented
|
||||
## cognito-idp - 0% implemented
|
||||
- [ ] add_custom_attributes
|
||||
- [ ] admin_add_user_to_group
|
||||
- [ ] admin_confirm_sign_up
|
||||
- [X] admin_create_user
|
||||
- [X] admin_delete_user
|
||||
- [ ] admin_create_user
|
||||
- [ ] admin_delete_user
|
||||
- [ ] admin_delete_user_attributes
|
||||
- [ ] admin_disable_provider_for_user
|
||||
- [ ] admin_disable_user
|
||||
- [ ] admin_enable_user
|
||||
- [ ] admin_forget_device
|
||||
- [ ] admin_get_device
|
||||
- [X] admin_get_user
|
||||
- [X] admin_initiate_auth
|
||||
- [ ] admin_get_user
|
||||
- [ ] admin_initiate_auth
|
||||
- [ ] admin_link_provider_for_user
|
||||
- [ ] admin_list_devices
|
||||
- [ ] admin_list_groups_for_user
|
||||
@ -862,32 +855,32 @@
|
||||
- [ ] admin_update_user_attributes
|
||||
- [ ] admin_user_global_sign_out
|
||||
- [ ] associate_software_token
|
||||
- [X] change_password
|
||||
- [ ] change_password
|
||||
- [ ] confirm_device
|
||||
- [X] confirm_forgot_password
|
||||
- [ ] confirm_forgot_password
|
||||
- [ ] confirm_sign_up
|
||||
- [ ] create_group
|
||||
- [X] create_identity_provider
|
||||
- [ ] create_identity_provider
|
||||
- [ ] create_resource_server
|
||||
- [ ] create_user_import_job
|
||||
- [X] create_user_pool
|
||||
- [X] create_user_pool_client
|
||||
- [X] create_user_pool_domain
|
||||
- [ ] create_user_pool
|
||||
- [ ] create_user_pool_client
|
||||
- [ ] create_user_pool_domain
|
||||
- [ ] delete_group
|
||||
- [X] delete_identity_provider
|
||||
- [ ] delete_identity_provider
|
||||
- [ ] delete_resource_server
|
||||
- [ ] delete_user
|
||||
- [ ] delete_user_attributes
|
||||
- [X] delete_user_pool
|
||||
- [X] delete_user_pool_client
|
||||
- [X] delete_user_pool_domain
|
||||
- [X] describe_identity_provider
|
||||
- [ ] delete_user_pool
|
||||
- [ ] delete_user_pool_client
|
||||
- [ ] delete_user_pool_domain
|
||||
- [ ] describe_identity_provider
|
||||
- [ ] describe_resource_server
|
||||
- [ ] describe_risk_configuration
|
||||
- [ ] describe_user_import_job
|
||||
- [X] describe_user_pool
|
||||
- [X] describe_user_pool_client
|
||||
- [X] describe_user_pool_domain
|
||||
- [ ] describe_user_pool
|
||||
- [ ] describe_user_pool_client
|
||||
- [ ] describe_user_pool_domain
|
||||
- [ ] forget_device
|
||||
- [ ] forgot_password
|
||||
- [ ] get_csv_header
|
||||
@ -903,15 +896,15 @@
|
||||
- [ ] initiate_auth
|
||||
- [ ] list_devices
|
||||
- [ ] list_groups
|
||||
- [X] list_identity_providers
|
||||
- [ ] list_identity_providers
|
||||
- [ ] list_resource_servers
|
||||
- [ ] list_user_import_jobs
|
||||
- [X] list_user_pool_clients
|
||||
- [X] list_user_pools
|
||||
- [X] list_users
|
||||
- [ ] list_user_pool_clients
|
||||
- [ ] list_user_pools
|
||||
- [ ] list_users
|
||||
- [ ] list_users_in_group
|
||||
- [ ] resend_confirmation_code
|
||||
- [X] respond_to_auth_challenge
|
||||
- [ ] respond_to_auth_challenge
|
||||
- [ ] set_risk_configuration
|
||||
- [ ] set_ui_customization
|
||||
- [ ] set_user_mfa_preference
|
||||
@ -927,7 +920,7 @@
|
||||
- [ ] update_resource_server
|
||||
- [ ] update_user_attributes
|
||||
- [ ] update_user_pool
|
||||
- [X] update_user_pool_client
|
||||
- [ ] update_user_pool_client
|
||||
- [ ] verify_software_token
|
||||
- [ ] verify_user_attribute
|
||||
|
||||
@ -1065,7 +1058,6 @@
|
||||
- [ ] create_project
|
||||
- [ ] create_remote_access_session
|
||||
- [ ] create_upload
|
||||
- [ ] create_vpce_configuration
|
||||
- [ ] delete_device_pool
|
||||
- [ ] delete_instance_profile
|
||||
- [ ] delete_network_profile
|
||||
@ -1073,7 +1065,6 @@
|
||||
- [ ] delete_remote_access_session
|
||||
- [ ] delete_run
|
||||
- [ ] delete_upload
|
||||
- [ ] delete_vpce_configuration
|
||||
- [ ] get_account_settings
|
||||
- [ ] get_device
|
||||
- [ ] get_device_instance
|
||||
@ -1089,7 +1080,6 @@
|
||||
- [ ] get_suite
|
||||
- [ ] get_test
|
||||
- [ ] get_upload
|
||||
- [ ] get_vpce_configuration
|
||||
- [ ] install_to_remote_access_session
|
||||
- [ ] list_artifacts
|
||||
- [ ] list_device_instances
|
||||
@ -1109,7 +1099,6 @@
|
||||
- [ ] list_tests
|
||||
- [ ] list_unique_problems
|
||||
- [ ] list_uploads
|
||||
- [ ] list_vpce_configurations
|
||||
- [ ] purchase_offering
|
||||
- [ ] renew_offering
|
||||
- [ ] schedule_run
|
||||
@ -1120,7 +1109,6 @@
|
||||
- [ ] update_instance_profile
|
||||
- [ ] update_network_profile
|
||||
- [ ] update_project
|
||||
- [ ] update_vpce_configuration
|
||||
|
||||
## directconnect - 0% implemented
|
||||
- [ ] allocate_connection_on_interconnect
|
||||
@ -1277,7 +1265,7 @@
|
||||
- [ ] update_radius
|
||||
- [ ] verify_trust
|
||||
|
||||
## dynamodb - 21% implemented
|
||||
## dynamodb - 22% implemented
|
||||
- [ ] batch_get_item
|
||||
- [ ] batch_write_item
|
||||
- [ ] create_backup
|
||||
@ -1289,7 +1277,6 @@
|
||||
- [ ] describe_backup
|
||||
- [ ] describe_continuous_backups
|
||||
- [ ] describe_global_table
|
||||
- [ ] describe_global_table_settings
|
||||
- [ ] describe_limits
|
||||
- [ ] describe_table
|
||||
- [ ] describe_time_to_live
|
||||
@ -1307,7 +1294,6 @@
|
||||
- [ ] untag_resource
|
||||
- [ ] update_continuous_backups
|
||||
- [ ] update_global_table
|
||||
- [ ] update_global_table_settings
|
||||
- [ ] update_item
|
||||
- [ ] update_table
|
||||
- [ ] update_time_to_live
|
||||
@ -1318,7 +1304,7 @@
|
||||
- [ ] get_shard_iterator
|
||||
- [ ] list_streams
|
||||
|
||||
## ec2 - 36% implemented
|
||||
## ec2 - 37% implemented
|
||||
- [ ] accept_reserved_instances_exchange_quote
|
||||
- [ ] accept_vpc_endpoint_connections
|
||||
- [X] accept_vpc_peering_connection
|
||||
@ -1356,7 +1342,6 @@
|
||||
- [ ] create_default_vpc
|
||||
- [X] create_dhcp_options
|
||||
- [ ] create_egress_only_internet_gateway
|
||||
- [ ] create_fleet
|
||||
- [ ] create_flow_logs
|
||||
- [ ] create_fpga_image
|
||||
- [X] create_image
|
||||
@ -1391,7 +1376,6 @@
|
||||
- [X] delete_customer_gateway
|
||||
- [ ] delete_dhcp_options
|
||||
- [ ] delete_egress_only_internet_gateway
|
||||
- [ ] delete_fleets
|
||||
- [ ] delete_flow_logs
|
||||
- [ ] delete_fpga_image
|
||||
- [X] delete_internet_gateway
|
||||
@ -1433,9 +1417,6 @@
|
||||
- [ ] describe_egress_only_internet_gateways
|
||||
- [ ] describe_elastic_gpus
|
||||
- [ ] describe_export_tasks
|
||||
- [ ] describe_fleet_history
|
||||
- [ ] describe_fleet_instances
|
||||
- [ ] describe_fleets
|
||||
- [ ] describe_flow_logs
|
||||
- [ ] describe_fpga_image_attribute
|
||||
- [ ] describe_fpga_images
|
||||
@ -1532,7 +1513,6 @@
|
||||
- [X] import_key_pair
|
||||
- [ ] import_snapshot
|
||||
- [ ] import_volume
|
||||
- [ ] modify_fleet
|
||||
- [ ] modify_fpga_image_attribute
|
||||
- [ ] modify_hosts
|
||||
- [ ] modify_id_format
|
||||
@ -1905,11 +1885,8 @@
|
||||
- [ ] delete_delivery_stream
|
||||
- [ ] describe_delivery_stream
|
||||
- [ ] list_delivery_streams
|
||||
- [ ] list_tags_for_delivery_stream
|
||||
- [ ] put_record
|
||||
- [ ] put_record_batch
|
||||
- [ ] tag_delivery_stream
|
||||
- [ ] untag_delivery_stream
|
||||
- [ ] update_destination
|
||||
|
||||
## fms - 0% implemented
|
||||
@ -2231,7 +2208,7 @@
|
||||
- [ ] describe_event_types
|
||||
- [ ] describe_events
|
||||
|
||||
## iam - 47% implemented
|
||||
## iam - 48% implemented
|
||||
- [ ] add_client_id_to_open_id_connect_provider
|
||||
- [X] add_role_to_instance_profile
|
||||
- [X] add_user_to_group
|
||||
@ -2281,7 +2258,7 @@
|
||||
- [X] enable_mfa_device
|
||||
- [ ] generate_credential_report
|
||||
- [ ] get_access_key_last_used
|
||||
- [ ] get_account_authorization_details
|
||||
- [X] get_account_authorization_details
|
||||
- [ ] get_account_password_policy
|
||||
- [ ] get_account_summary
|
||||
- [ ] get_context_keys_for_custom_policy
|
||||
@ -2524,11 +2501,11 @@
|
||||
- [X] update_thing_group
|
||||
- [X] update_thing_groups_for_thing
|
||||
|
||||
## iot-data - 100% implemented
|
||||
- [X] delete_thing_shadow
|
||||
- [X] get_thing_shadow
|
||||
- [X] publish
|
||||
- [X] update_thing_shadow
|
||||
## iot-data - 0% implemented
|
||||
- [ ] delete_thing_shadow
|
||||
- [ ] get_thing_shadow
|
||||
- [ ] publish
|
||||
- [ ] update_thing_shadow
|
||||
|
||||
## iot-jobs-data - 0% implemented
|
||||
- [ ] describe_job_execution
|
||||
@ -2536,38 +2513,6 @@
|
||||
- [ ] start_next_pending_job_execution
|
||||
- [ ] update_job_execution
|
||||
|
||||
## iotanalytics - 0% implemented
|
||||
- [ ] batch_put_message
|
||||
- [ ] cancel_pipeline_reprocessing
|
||||
- [ ] create_channel
|
||||
- [ ] create_dataset
|
||||
- [ ] create_dataset_content
|
||||
- [ ] create_datastore
|
||||
- [ ] create_pipeline
|
||||
- [ ] delete_channel
|
||||
- [ ] delete_dataset
|
||||
- [ ] delete_dataset_content
|
||||
- [ ] delete_datastore
|
||||
- [ ] delete_pipeline
|
||||
- [ ] describe_channel
|
||||
- [ ] describe_dataset
|
||||
- [ ] describe_datastore
|
||||
- [ ] describe_logging_options
|
||||
- [ ] describe_pipeline
|
||||
- [ ] get_dataset_content
|
||||
- [ ] list_channels
|
||||
- [ ] list_datasets
|
||||
- [ ] list_datastores
|
||||
- [ ] list_pipelines
|
||||
- [ ] put_logging_options
|
||||
- [ ] run_pipeline_activity
|
||||
- [ ] sample_channel_data
|
||||
- [ ] start_pipeline_reprocessing
|
||||
- [ ] update_channel
|
||||
- [ ] update_dataset
|
||||
- [ ] update_datastore
|
||||
- [ ] update_pipeline
|
||||
|
||||
## kinesis - 56% implemented
|
||||
- [X] add_tags_to_stream
|
||||
- [X] create_stream
|
||||
@ -3569,9 +3514,6 @@
|
||||
- [ ] update_tags_for_domain
|
||||
- [ ] view_billing
|
||||
|
||||
## runtime.sagemaker - 0% implemented
|
||||
- [ ] invoke_endpoint
|
||||
|
||||
## s3 - 15% implemented
|
||||
- [ ] abort_multipart_upload
|
||||
- [ ] complete_multipart_upload
|
||||
@ -3703,12 +3645,12 @@
|
||||
- [ ] put_attributes
|
||||
- [ ] select
|
||||
|
||||
## secretsmanager - 6% implemented
|
||||
## secretsmanager - 27% implemented
|
||||
- [ ] cancel_rotate_secret
|
||||
- [ ] create_secret
|
||||
- [X] create_secret
|
||||
- [ ] delete_secret
|
||||
- [ ] describe_secret
|
||||
- [ ] get_random_password
|
||||
- [X] describe_secret
|
||||
- [X] get_random_password
|
||||
- [X] get_secret_value
|
||||
- [ ] list_secret_version_ids
|
||||
- [ ] list_secrets
|
||||
@ -3984,7 +3926,7 @@
|
||||
- [X] tag_queue
|
||||
- [X] untag_queue
|
||||
|
||||
## ssm - 10% implemented
|
||||
## ssm - 11% implemented
|
||||
- [X] add_tags_to_resource
|
||||
- [ ] cancel_command
|
||||
- [ ] create_activation
|
||||
@ -3997,7 +3939,6 @@
|
||||
- [ ] delete_activation
|
||||
- [ ] delete_association
|
||||
- [ ] delete_document
|
||||
- [ ] delete_inventory
|
||||
- [ ] delete_maintenance_window
|
||||
- [X] delete_parameter
|
||||
- [X] delete_parameters
|
||||
@ -4021,7 +3962,6 @@
|
||||
- [ ] describe_instance_patch_states
|
||||
- [ ] describe_instance_patch_states_for_patch_group
|
||||
- [ ] describe_instance_patches
|
||||
- [ ] describe_inventory_deletions
|
||||
- [ ] describe_maintenance_window_execution_task_invocations
|
||||
- [ ] describe_maintenance_window_execution_tasks
|
||||
- [ ] describe_maintenance_window_executions
|
||||
@ -4053,7 +3993,7 @@
|
||||
- [ ] list_association_versions
|
||||
- [ ] list_associations
|
||||
- [ ] list_command_invocations
|
||||
- [ ] list_commands
|
||||
- [X] list_commands
|
||||
- [ ] list_compliance_items
|
||||
- [ ] list_compliance_summaries
|
||||
- [ ] list_document_versions
|
||||
@ -4464,36 +4404,25 @@
|
||||
- [ ] update_resource
|
||||
|
||||
## workspaces - 0% implemented
|
||||
- [ ] associate_ip_groups
|
||||
- [ ] authorize_ip_rules
|
||||
- [ ] create_ip_group
|
||||
- [ ] create_tags
|
||||
- [ ] create_workspaces
|
||||
- [ ] delete_ip_group
|
||||
- [ ] delete_tags
|
||||
- [ ] describe_ip_groups
|
||||
- [ ] describe_tags
|
||||
- [ ] describe_workspace_bundles
|
||||
- [ ] describe_workspace_directories
|
||||
- [ ] describe_workspaces
|
||||
- [ ] describe_workspaces_connection_status
|
||||
- [ ] disassociate_ip_groups
|
||||
- [ ] modify_workspace_properties
|
||||
- [ ] modify_workspace_state
|
||||
- [ ] reboot_workspaces
|
||||
- [ ] rebuild_workspaces
|
||||
- [ ] revoke_ip_rules
|
||||
- [ ] start_workspaces
|
||||
- [ ] stop_workspaces
|
||||
- [ ] terminate_workspaces
|
||||
- [ ] update_rules_of_ip_group
|
||||
|
||||
## xray - 0% implemented
|
||||
- [ ] batch_get_traces
|
||||
- [ ] get_encryption_config
|
||||
- [ ] get_service_graph
|
||||
- [ ] get_trace_graph
|
||||
- [ ] get_trace_summaries
|
||||
- [ ] put_encryption_config
|
||||
- [ ] put_telemetry_records
|
||||
- [ ] put_trace_segments
|
||||
|
@@ -175,7 +175,7 @@ def test_add_servers():
```

#### Using moto 1.0.X with boto2
moto 1.0.X mock docorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.
moto 1.0.X mock decorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.

Using moto with boto2
```python
@@ -3,7 +3,7 @@ import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)

__title__ = 'moto'
__version__ = '1.3.3'
__version__ = '1.3.5'

from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
@@ -24,6 +24,7 @@ from .elbv2 import mock_elbv2 # flake8: noqa
from .emr import mock_emr, mock_emr_deprecated # flake8: noqa
from .events import mock_events # flake8: noqa
from .glacier import mock_glacier, mock_glacier_deprecated # flake8: noqa
from .glue import mock_glue # flake8: noqa
from .iam import mock_iam, mock_iam_deprecated # flake8: noqa
from .kinesis import mock_kinesis, mock_kinesis_deprecated # flake8: noqa
from .kms import mock_kms, mock_kms_deprecated # flake8: noqa
@@ -8,3 +8,11 @@ class StageNotFoundException(RESTError):
    def __init__(self):
        super(StageNotFoundException, self).__init__(
            "NotFoundException", "Invalid stage identifier specified")


class ApiKeyNotFoundException(RESTError):
    code = 404

    def __init__(self):
        super(ApiKeyNotFoundException, self).__init__(
            "NotFoundException", "Invalid API Key identifier specified")
@ -10,7 +10,7 @@ from boto3.session import Session
|
||||
import responses
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from .utils import create_id
|
||||
from .exceptions import StageNotFoundException
|
||||
from .exceptions import StageNotFoundException, ApiKeyNotFoundException
|
||||
|
||||
STAGE_URL = "https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}"
|
||||
|
||||
@ -300,11 +300,7 @@ class ApiKey(BaseModel, dict):
|
||||
generateDistinctId=False, value=None, stageKeys=None, customerId=None):
|
||||
super(ApiKey, self).__init__()
|
||||
self['id'] = create_id()
|
||||
if generateDistinctId:
|
||||
# Best guess of what AWS does internally
|
||||
self['value'] = ''.join(random.sample(string.ascii_letters + string.digits, 40))
|
||||
else:
|
||||
self['value'] = value
|
||||
self['value'] = value if value else ''.join(random.sample(string.ascii_letters + string.digits, 40))
|
||||
self['name'] = name
|
||||
self['customerId'] = customerId
|
||||
self['description'] = description
|
||||
@ -313,6 +309,29 @@ class ApiKey(BaseModel, dict):
|
||||
self['stageKeys'] = stageKeys
|
||||
|
||||
|
||||
class UsagePlan(BaseModel, dict):
|
||||
|
||||
def __init__(self, name=None, description=None, apiStages=[],
|
||||
throttle=None, quota=None):
|
||||
super(UsagePlan, self).__init__()
|
||||
self['id'] = create_id()
|
||||
self['name'] = name
|
||||
self['description'] = description
|
||||
self['apiStages'] = apiStages
|
||||
self['throttle'] = throttle
|
||||
self['quota'] = quota
|
||||
|
||||
|
||||
class UsagePlanKey(BaseModel, dict):
|
||||
|
||||
def __init__(self, id, type, name, value):
|
||||
super(UsagePlanKey, self).__init__()
|
||||
self['id'] = id
|
||||
self['name'] = name
|
||||
self['type'] = type
|
||||
self['value'] = value
|
||||
|
||||
|
||||
class RestAPI(BaseModel):
|
||||
|
||||
def __init__(self, id, region_name, name, description):
|
||||
@ -412,6 +431,8 @@ class APIGatewayBackend(BaseBackend):
|
||||
super(APIGatewayBackend, self).__init__()
|
||||
self.apis = {}
|
||||
self.keys = {}
|
||||
self.usage_plans = {}
|
||||
self.usage_plan_keys = {}
|
||||
self.region_name = region_name
|
||||
|
||||
def reset(self):
|
||||
@ -580,6 +601,48 @@ class APIGatewayBackend(BaseBackend):
|
||||
self.keys.pop(api_key_id)
|
||||
return {}
|
||||
|
||||
def create_usage_plan(self, payload):
|
||||
plan = UsagePlan(**payload)
|
||||
self.usage_plans[plan['id']] = plan
|
||||
return plan
|
||||
|
||||
def get_usage_plans(self):
|
||||
return list(self.usage_plans.values())
|
||||
|
||||
def get_usage_plan(self, usage_plan_id):
|
||||
return self.usage_plans[usage_plan_id]
|
||||
|
||||
def delete_usage_plan(self, usage_plan_id):
|
||||
self.usage_plans.pop(usage_plan_id)
|
||||
return {}
|
||||
|
||||
def create_usage_plan_key(self, usage_plan_id, payload):
|
||||
if usage_plan_id not in self.usage_plan_keys:
|
||||
self.usage_plan_keys[usage_plan_id] = {}
|
||||
|
||||
key_id = payload["keyId"]
|
||||
if key_id not in self.keys:
|
||||
raise ApiKeyNotFoundException()
|
||||
|
||||
api_key = self.keys[key_id]
|
||||
|
||||
usage_plan_key = UsagePlanKey(id=key_id, type=payload["keyType"], name=api_key["name"], value=api_key["value"])
|
||||
self.usage_plan_keys[usage_plan_id][usage_plan_key['id']] = usage_plan_key
|
||||
return usage_plan_key
|
||||
|
||||
def get_usage_plan_keys(self, usage_plan_id):
|
||||
if usage_plan_id not in self.usage_plan_keys:
|
||||
return []
|
||||
|
||||
return list(self.usage_plan_keys[usage_plan_id].values())
|
||||
|
||||
def get_usage_plan_key(self, usage_plan_id, key_id):
|
||||
return self.usage_plan_keys[usage_plan_id][key_id]
|
||||
|
||||
def delete_usage_plan_key(self, usage_plan_id, key_id):
|
||||
self.usage_plan_keys[usage_plan_id].pop(key_id)
|
||||
return {}
|
||||
|
||||
|
||||
apigateway_backends = {}
|
||||
for region_name in Session().get_available_regions('apigateway'):
|
||||
|
@ -4,7 +4,7 @@ import json
|
||||
|
||||
from moto.core.responses import BaseResponse
|
||||
from .models import apigateway_backends
|
||||
from .exceptions import StageNotFoundException
|
||||
from .exceptions import StageNotFoundException, ApiKeyNotFoundException
|
||||
|
||||
|
||||
class APIGatewayResponse(BaseResponse):
|
||||
@ -248,3 +248,56 @@ class APIGatewayResponse(BaseResponse):
|
||||
elif self.method == 'DELETE':
|
||||
apikey_response = self.backend.delete_apikey(apikey)
|
||||
return 200, {}, json.dumps(apikey_response)
|
||||
|
||||
def usage_plans(self, request, full_url, headers):
|
||||
self.setup_class(request, full_url, headers)
|
||||
|
||||
if self.method == 'POST':
|
||||
usage_plan_response = self.backend.create_usage_plan(json.loads(self.body))
|
||||
elif self.method == 'GET':
|
||||
usage_plans_response = self.backend.get_usage_plans()
|
||||
return 200, {}, json.dumps({"item": usage_plans_response})
|
||||
return 200, {}, json.dumps(usage_plan_response)
|
||||
|
||||
def usage_plan_individual(self, request, full_url, headers):
|
||||
self.setup_class(request, full_url, headers)
|
||||
|
||||
url_path_parts = self.path.split("/")
|
||||
usage_plan = url_path_parts[2]
|
||||
|
||||
if self.method == 'GET':
|
||||
usage_plan_response = self.backend.get_usage_plan(usage_plan)
|
||||
elif self.method == 'DELETE':
|
||||
usage_plan_response = self.backend.delete_usage_plan(usage_plan)
|
||||
return 200, {}, json.dumps(usage_plan_response)
|
||||
|
||||
def usage_plan_keys(self, request, full_url, headers):
|
||||
self.setup_class(request, full_url, headers)
|
||||
|
||||
url_path_parts = self.path.split("/")
|
||||
usage_plan_id = url_path_parts[2]
|
||||
|
||||
if self.method == 'POST':
|
||||
try:
|
||||
usage_plan_response = self.backend.create_usage_plan_key(usage_plan_id, json.loads(self.body))
|
||||
except ApiKeyNotFoundException as error:
|
||||
return error.code, {}, '{{"message":"{0}","code":"{1}"}}'.format(error.message, error.error_type)
|
||||
|
||||
elif self.method == 'GET':
|
||||
usage_plans_response = self.backend.get_usage_plan_keys(usage_plan_id)
|
||||
return 200, {}, json.dumps({"item": usage_plans_response})
|
||||
|
||||
return 200, {}, json.dumps(usage_plan_response)
|
||||
|
||||
def usage_plan_key_individual(self, request, full_url, headers):
|
||||
self.setup_class(request, full_url, headers)
|
||||
|
||||
url_path_parts = self.path.split("/")
|
||||
usage_plan_id = url_path_parts[2]
|
||||
key_id = url_path_parts[4]
|
||||
|
||||
if self.method == 'GET':
|
||||
usage_plan_response = self.backend.get_usage_plan_key(usage_plan_id, key_id)
|
||||
elif self.method == 'DELETE':
|
||||
usage_plan_response = self.backend.delete_usage_plan_key(usage_plan_id, key_id)
|
||||
return 200, {}, json.dumps(usage_plan_response)
|
||||
|
@@ -20,4 +20,8 @@ url_paths = {
    '{0}/restapis/(?P<function_id>[^/]+)/resources/(?P<resource_id>[^/]+)/methods/(?P<method_name>[^/]+)/integration/responses/(?P<status_code>\d+)/?$': APIGatewayResponse().integration_responses,
    '{0}/apikeys$': APIGatewayResponse().apikeys,
    '{0}/apikeys/(?P<apikey>[^/]+)': APIGatewayResponse().apikey_individual,
    '{0}/usageplans$': APIGatewayResponse().usage_plans,
    '{0}/usageplans/(?P<usage_plan_id>[^/]+)/?$': APIGatewayResponse().usage_plan_individual,
    '{0}/usageplans/(?P<usage_plan_id>[^/]+)/keys$': APIGatewayResponse().usage_plan_keys,
    '{0}/usageplans/(?P<usage_plan_id>[^/]+)/keys/(?P<api_key_id>[^/]+)/?$': APIGatewayResponse().usage_plan_key_individual,
}
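Taken together, the new routes, backend methods, and response handlers above let a mocked API Gateway serve the usage-plan CRUD calls. A rough sketch of how the feature might be exercised from boto3 — the region, names, and key value below are illustrative, not values taken from this change:

```python
import boto3
from moto import mock_apigateway


@mock_apigateway
def exercise_usage_plans():
    client = boto3.client("apigateway", region_name="us-east-1")

    # Create an API key and a usage plan, then attach the key to the plan.
    key = client.create_api_key(name="example-key")
    plan = client.create_usage_plan(name="example-plan")
    client.create_usage_plan_key(
        usagePlanId=plan["id"], keyId=key["id"], keyType="API_KEY")

    # Both the plan and its attached key should now be visible through the mock.
    print([p["name"] for p in client.get_usage_plans()["items"]])
    print([k["name"] for k in client.get_usage_plan_keys(usagePlanId=plan["id"])["items"]])


exercise_usage_plans()
```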
@ -20,6 +20,7 @@ from moto.elbv2 import elbv2_backends
|
||||
from moto.emr import emr_backends
|
||||
from moto.events import events_backends
|
||||
from moto.glacier import glacier_backends
|
||||
from moto.glue import glue_backends
|
||||
from moto.iam import iam_backends
|
||||
from moto.instance_metadata import instance_metadata_backends
|
||||
from moto.kinesis import kinesis_backends
|
||||
@ -65,6 +66,7 @@ BACKENDS = {
|
||||
'events': events_backends,
|
||||
'emr': emr_backends,
|
||||
'glacier': glacier_backends,
|
||||
'glue': glue_backends,
|
||||
'iam': iam_backends,
|
||||
'moto_api': moto_api_backends,
|
||||
'instance_metadata': instance_metadata_backends,
|
||||
|
@@ -29,8 +29,10 @@ COMPARISON_FUNCS = {
    'GT': GT_FUNCTION,
    '>': GT_FUNCTION,

    'NULL': lambda item_value: item_value is None,
    'NOT_NULL': lambda item_value: item_value is not None,
    # NULL means the value should not exist at all
    'NULL': lambda item_value: False,
    # NOT_NULL means the value merely has to exist, and values of None are valid
    'NOT_NULL': lambda item_value: True,
    'CONTAINS': lambda item_value, test_value: test_value in item_value,
    'NOT_CONTAINS': lambda item_value, test_value: test_value not in item_value,
    'BEGINS_WITH': lambda item_value, test_value: item_value.startswith(test_value),
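The revised semantics are easiest to see in isolation: NULL and NOT_NULL no longer inspect the stored value at all, because the surrounding code only invokes them for attributes that actually exist. A small illustrative sketch, assuming get_comparison_func is importable from moto.dynamodb2.comparisons as the models module below suggests:

```python
from moto.dynamodb2.comparisons import get_comparison_func

# For an attribute that exists, NOT_NULL is always satisfied and NULL always
# fails, regardless of the stored value -- even an explicit None counts as
# "present" under the new behaviour.
not_null = get_comparison_func('NOT_NULL')
null = get_comparison_func('NULL')

print(not_null(None))    # True: the attribute exists, its value is irrelevant
print(null('anything'))  # False: an existing attribute can never satisfy NULL
```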
@ -409,7 +409,8 @@ class Table(BaseModel):
|
||||
current_attr = current
|
||||
|
||||
for key, val in expected.items():
|
||||
if 'Exists' in val and val['Exists'] is False:
|
||||
if 'Exists' in val and val['Exists'] is False \
|
||||
or 'ComparisonOperator' in val and val['ComparisonOperator'] == 'NULL':
|
||||
if key in current_attr:
|
||||
raise ValueError("The conditional request failed")
|
||||
elif key not in current_attr:
|
||||
@ -419,8 +420,10 @@ class Table(BaseModel):
|
||||
elif 'ComparisonOperator' in val:
|
||||
comparison_func = get_comparison_func(
|
||||
val['ComparisonOperator'])
|
||||
dynamo_types = [DynamoType(ele) for ele in val[
|
||||
"AttributeValueList"]]
|
||||
dynamo_types = [
|
||||
DynamoType(ele) for ele in
|
||||
val.get("AttributeValueList", [])
|
||||
]
|
||||
for t in dynamo_types:
|
||||
if not comparison_func(current_attr[key].value, t.value):
|
||||
raise ValueError('The conditional request failed')
|
||||
@ -827,7 +830,8 @@ class DynamoDBBackend(BaseBackend):
|
||||
expected = {}
|
||||
|
||||
for key, val in expected.items():
|
||||
if 'Exists' in val and val['Exists'] is False:
|
||||
if 'Exists' in val and val['Exists'] is False \
|
||||
or 'ComparisonOperator' in val and val['ComparisonOperator'] == 'NULL':
|
||||
if key in item_attr:
|
||||
raise ValueError("The conditional request failed")
|
||||
elif key not in item_attr:
|
||||
@ -837,8 +841,10 @@ class DynamoDBBackend(BaseBackend):
|
||||
elif 'ComparisonOperator' in val:
|
||||
comparison_func = get_comparison_func(
|
||||
val['ComparisonOperator'])
|
||||
dynamo_types = [DynamoType(ele) for ele in val[
|
||||
"AttributeValueList"]]
|
||||
dynamo_types = [
|
||||
DynamoType(ele) for ele in
|
||||
val.get("AttributeValueList", [])
|
||||
]
|
||||
for t in dynamo_types:
|
||||
if not comparison_func(item_attr[key].value, t.value):
|
||||
raise ValueError('The conditional request failed')
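The switch from indexing val["AttributeValueList"] to val.get("AttributeValueList", []) matters because the legacy Expected syntax for the NULL operator carries no attribute values at all. A stand-alone sketch (not code from the change) of the shape being handled:

```python
# Hypothetical Expected clause as a client might send it: NULL has no
# AttributeValueList, so indexing it directly would raise a KeyError.
expected = {'deleted_at': {'ComparisonOperator': 'NULL'}}

for key, val in expected.items():
    attribute_values = val.get("AttributeValueList", [])  # [] instead of KeyError
    print(key, val['ComparisonOperator'], attribute_values)
```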
|
||||
|
@@ -209,17 +209,22 @@
"""
maxResults and filtering not implemented
"""
images = []
for repository in self.repositories.values():
if repository_name:
if repository.name != repository_name:
continue
repository = None
found = False
if repository_name in self.repositories:
repository = self.repositories[repository_name]
if registry_id:
if repository.registry_id != registry_id:
continue
if repository.registry_id == registry_id:
found = True
else:
found = True

for image in repository.images:
images.append(image)
if not found:
raise RepositoryNotFoundException(repository_name, registry_id or DEFAULT_REGISTRY_ID)

images = []
for image in repository.images:
images.append(image)
return images

def describe_images(self, repository_name, registry_id=None, image_ids=None):
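With this rework, listing images for a repository that does not exist raises RepositoryNotFoundException instead of silently returning an empty list. A sketch of how that might look through boto3 under the mock — the repository names are illustrative:

```python
import boto3
from botocore.exceptions import ClientError
from moto import mock_ecr


@mock_ecr
def list_images_example():
    client = boto3.client("ecr", region_name="us-east-1")
    client.create_repository(repositoryName="example-repo")

    # Listing an existing repository succeeds (it is simply empty here).
    print(client.list_images(repositoryName="example-repo")["imageIds"])

    # Listing a repository that was never created should now surface an error.
    try:
        client.list_images(repositoryName="missing-repo")
    except ClientError as err:
        print(err.response["Error"]["Code"])


list_images_example()
```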
moto/ecs/exceptions.py (new file, 11 lines)

@@ -0,0 +1,11 @@
from __future__ import unicode_literals
from moto.core.exceptions import RESTError


class ServiceNotFoundException(RESTError):
    code = 400

    def __init__(self, service_name):
        super(ServiceNotFoundException, self).__init__(
            error_type="ServiceNotFoundException",
            message="The service {0} does not exist".format(service_name))
@ -10,6 +10,8 @@ from moto.core import BaseBackend, BaseModel
|
||||
from moto.ec2 import ec2_backends
|
||||
from copy import copy
|
||||
|
||||
from .exceptions import ServiceNotFoundException
|
||||
|
||||
|
||||
class BaseObject(BaseModel):
|
||||
|
||||
@ -601,8 +603,9 @@ class EC2ContainerServiceBackend(BaseBackend):
|
||||
raise Exception("tasks cannot be empty")
|
||||
response = []
|
||||
for cluster, cluster_tasks in self.tasks.items():
|
||||
for task_id, task in cluster_tasks.items():
|
||||
if task_id in tasks or task.task_arn in tasks:
|
||||
for task_arn, task in cluster_tasks.items():
|
||||
task_id = task_arn.split("/")[-1]
|
||||
if task_arn in tasks or task.task_arn in tasks or any(task_id in task for task in tasks):
|
||||
response.append(task)
|
||||
return response
|
||||
|
||||
@ -700,8 +703,7 @@ class EC2ContainerServiceBackend(BaseBackend):
|
||||
cluster_service_pair].desired_count = desired_count
|
||||
return self.services[cluster_service_pair]
|
||||
else:
|
||||
raise Exception("cluster {0} or service {1} does not exist".format(
|
||||
cluster_name, service_name))
|
||||
raise ServiceNotFoundException(service_name)
|
||||
|
||||
def delete_service(self, cluster_name, service_name):
|
||||
cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
|
||||
|
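update_service on an unknown cluster/service pair now raises the typed ServiceNotFoundException defined above rather than a bare Exception, which should surface to boto3 callers as a ClientError. A rough sketch with illustrative names, not a test from the repository:

```python
import boto3
from botocore.exceptions import ClientError
from moto import mock_ecs


@mock_ecs
def update_missing_service():
    client = boto3.client("ecs", region_name="us-east-1")
    client.create_cluster(clusterName="example-cluster")
    try:
        client.update_service(
            cluster="example-cluster", service="no-such-service", desiredCount=1)
    except ClientError as err:
        print(err.response["Error"]["Code"], err.response["Error"]["Message"])


update_missing_service()
```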
moto/glue/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
from __future__ import unicode_literals
from .models import glue_backend

glue_backends = {"global": glue_backend}
mock_glue = glue_backend.decorator
moto/glue/exceptions.py (new file, 24 lines)

@@ -0,0 +1,24 @@
from __future__ import unicode_literals
|
||||
from moto.core.exceptions import JsonRESTError
|
||||
|
||||
|
||||
class GlueClientError(JsonRESTError):
|
||||
code = 400
|
||||
|
||||
|
||||
class DatabaseAlreadyExistsException(GlueClientError):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
super(DatabaseAlreadyExistsException, self).__init__(
|
||||
'DatabaseAlreadyExistsException',
|
||||
'Database already exists.'
|
||||
)
|
||||
|
||||
|
||||
class TableAlreadyExistsException(GlueClientError):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
super(TableAlreadyExistsException, self).__init__(
|
||||
'TableAlreadyExistsException',
|
||||
'Table already exists.'
|
||||
)
|
moto/glue/models.py (new file, 60 lines)

@@ -0,0 +1,60 @@
from __future__ import unicode_literals
|
||||
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.compat import OrderedDict
|
||||
from.exceptions import DatabaseAlreadyExistsException, TableAlreadyExistsException
|
||||
|
||||
|
||||
class GlueBackend(BaseBackend):
|
||||
|
||||
def __init__(self):
|
||||
self.databases = OrderedDict()
|
||||
|
||||
def create_database(self, database_name):
|
||||
if database_name in self.databases:
|
||||
raise DatabaseAlreadyExistsException()
|
||||
|
||||
database = FakeDatabase(database_name)
|
||||
self.databases[database_name] = database
|
||||
return database
|
||||
|
||||
def get_database(self, database_name):
|
||||
return self.databases[database_name]
|
||||
|
||||
def create_table(self, database_name, table_name, table_input):
|
||||
database = self.get_database(database_name)
|
||||
|
||||
if table_name in database.tables:
|
||||
raise TableAlreadyExistsException()
|
||||
|
||||
table = FakeTable(database_name, table_name, table_input)
|
||||
database.tables[table_name] = table
|
||||
return table
|
||||
|
||||
def get_table(self, database_name, table_name):
|
||||
database = self.get_database(database_name)
|
||||
return database.tables[table_name]
|
||||
|
||||
def get_tables(self, database_name):
|
||||
database = self.get_database(database_name)
|
||||
return [table for table_name, table in database.tables.items()]
|
||||
|
||||
|
||||
class FakeDatabase(BaseModel):
|
||||
|
||||
def __init__(self, database_name):
|
||||
self.name = database_name
|
||||
self.tables = OrderedDict()
|
||||
|
||||
|
||||
class FakeTable(BaseModel):
|
||||
|
||||
def __init__(self, database_name, table_name, table_input):
|
||||
self.database_name = database_name
|
||||
self.name = table_name
|
||||
self.table_input = table_input
|
||||
self.storage_descriptor = self.table_input.get('StorageDescriptor', {})
|
||||
self.partition_keys = self.table_input.get('PartitionKeys', [])
|
||||
|
||||
|
||||
glue_backend = GlueBackend()
|
moto/glue/responses.py (new file, 63 lines)

@@ -0,0 +1,63 @@
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
|
||||
from moto.core.responses import BaseResponse
|
||||
from .models import glue_backend
|
||||
|
||||
|
||||
class GlueResponse(BaseResponse):
|
||||
|
||||
@property
|
||||
def glue_backend(self):
|
||||
return glue_backend
|
||||
|
||||
@property
|
||||
def parameters(self):
|
||||
return json.loads(self.body)
|
||||
|
||||
def create_database(self):
|
||||
database_name = self.parameters['DatabaseInput']['Name']
|
||||
self.glue_backend.create_database(database_name)
|
||||
return ""
|
||||
|
||||
def get_database(self):
|
||||
database_name = self.parameters.get('Name')
|
||||
database = self.glue_backend.get_database(database_name)
|
||||
return json.dumps({'Database': {'Name': database.name}})
|
||||
|
||||
def create_table(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_input = self.parameters.get('TableInput')
|
||||
table_name = table_input.get('Name')
|
||||
self.glue_backend.create_table(database_name, table_name, table_input)
|
||||
return ""
|
||||
|
||||
def get_table(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('Name')
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
return json.dumps({
|
||||
'Table': {
|
||||
'DatabaseName': table.database_name,
|
||||
'Name': table.name,
|
||||
'PartitionKeys': table.partition_keys,
|
||||
'StorageDescriptor': table.storage_descriptor
|
||||
}
|
||||
})
|
||||
|
||||
def get_tables(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
tables = self.glue_backend.get_tables(database_name)
|
||||
return json.dumps(
|
||||
{
|
||||
'TableList': [
|
||||
{
|
||||
'DatabaseName': table.database_name,
|
||||
'Name': table.name,
|
||||
'PartitionKeys': table.partition_keys,
|
||||
'StorageDescriptor': table.storage_descriptor
|
||||
} for table in tables
|
||||
]
|
||||
}
|
||||
)
|
moto/glue/urls.py (new file, 11 lines)

@@ -0,0 +1,11 @@
from __future__ import unicode_literals

from .responses import GlueResponse

url_bases = [
    "https?://glue(.*).amazonaws.com"
]

url_paths = {
    '{0}/$': GlueResponse.dispatch
}
moto/glue/utils.py (new file, 1 line)

@@ -0,0 +1 @@
from __future__ import unicode_literals
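Taken together, the new glue package wires a small in-memory backend (databases and their tables) through GlueResponse.dispatch. A sketch of the kind of round trip it is meant to support via boto3, with illustrative database and table names:

```python
import boto3
from moto import mock_glue


@mock_glue
def glue_round_trip():
    client = boto3.client("glue", region_name="us-east-1")

    client.create_database(DatabaseInput={"Name": "example_db"})
    client.create_table(
        DatabaseName="example_db",
        TableInput={
            "Name": "example_table",
            "PartitionKeys": [{"Name": "dt", "Type": "string"}],
            "StorageDescriptor": {"Columns": [{"Name": "id", "Type": "bigint"}]},
        },
    )

    table = client.get_table(DatabaseName="example_db", Name="example_table")
    print(table["Table"]["Name"], table["Table"]["PartitionKeys"])
    print([t["Name"] for t in client.get_tables(DatabaseName="example_db")["TableList"]])


glue_round_trip()
```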
@ -50,10 +50,6 @@ class Policy(BaseModel):
|
||||
self.create_datetime = datetime.now(pytz.utc)
|
||||
self.update_datetime = datetime.now(pytz.utc)
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
return 'arn:aws:iam::aws:policy{0}{1}'.format(self.path, self.name)
|
||||
|
||||
|
||||
class PolicyVersion(object):
|
||||
|
||||
@ -82,6 +78,10 @@ class ManagedPolicy(Policy):
|
||||
self.attachment_count -= 1
|
||||
del obj.managed_policies[self.name]
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
return "arn:aws:iam::{0}:policy{1}{2}".format(ACCOUNT_ID, self.path, self.name)
|
||||
|
||||
|
||||
class AWSManagedPolicy(ManagedPolicy):
|
||||
"""AWS-managed policy."""
|
||||
@ -93,6 +93,10 @@ class AWSManagedPolicy(ManagedPolicy):
|
||||
path=data.get('Path'),
|
||||
document=data.get('Document'))
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
return 'arn:aws:iam::aws:policy{0}{1}'.format(self.path, self.name)
|
||||
|
||||
|
||||
# AWS defines some of its own managed policies and we periodically
|
||||
# import them via `make aws_managed_policies`
|
||||
@ -901,5 +905,32 @@ class IAMBackend(BaseBackend):
|
||||
def delete_account_alias(self, alias):
|
||||
self.account_aliases = []
|
||||
|
||||
def get_account_authorization_details(self, filter):
|
||||
policies = self.managed_policies.values()
|
||||
local_policies = set(policies) - set(aws_managed_policies)
|
||||
returned_policies = []
|
||||
|
||||
if len(filter) == 0:
|
||||
return {
|
||||
'instance_profiles': self.instance_profiles.values(),
|
||||
'roles': self.roles.values(),
|
||||
'groups': self.groups.values(),
|
||||
'users': self.users.values(),
|
||||
'managed_policies': self.managed_policies.values()
|
||||
}
|
||||
|
||||
if 'AWSManagedPolicy' in filter:
|
||||
returned_policies = aws_managed_policies
|
||||
if 'LocalManagedPolicy' in filter:
|
||||
returned_policies = returned_policies + list(local_policies)
|
||||
|
||||
return {
|
||||
'instance_profiles': self.instance_profiles.values(),
|
||||
'roles': self.roles.values() if 'Role' in filter else [],
|
||||
'groups': self.groups.values() if 'Group' in filter else [],
|
||||
'users': self.users.values() if 'User' in filter else [],
|
||||
'managed_policies': returned_policies
|
||||
}
|
||||
|
||||
|
||||
iam_backend = IAMBackend()
|
||||
|
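The backend method above, together with the response template that follows, backs iam:GetAccountAuthorizationDetails: an empty Filter returns everything, otherwise only the requested entity types are included. A sketch of calling it through boto3 (resource names are examples):

```python
import boto3
from moto import mock_iam


@mock_iam
def account_authorization_details():
    client = boto3.client("iam", region_name="us-east-1")
    client.create_user(UserName="example-user")
    client.create_group(GroupName="example-group")

    # No filter: users, groups, roles, instance profiles and policies.
    everything = client.get_account_authorization_details()
    print([u["UserName"] for u in everything["UserDetailList"]])

    # Filtered: only locally managed policies are included, other lists are empty.
    local_only = client.get_account_authorization_details(Filter=["LocalManagedPolicy"])
    print(len(local_only["Policies"]), len(local_only["UserDetailList"]))


account_authorization_details()
```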
@ -534,6 +534,18 @@ class IamResponse(BaseResponse):
|
||||
template = self.response_template(DELETE_ACCOUNT_ALIAS_TEMPLATE)
|
||||
return template.render()
|
||||
|
||||
def get_account_authorization_details(self):
|
||||
filter_param = self._get_multi_param('Filter.member')
|
||||
account_details = iam_backend.get_account_authorization_details(filter_param)
|
||||
template = self.response_template(GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE)
|
||||
return template.render(
|
||||
instance_profiles=account_details['instance_profiles'],
|
||||
policies=account_details['managed_policies'],
|
||||
users=account_details['users'],
|
||||
groups=account_details['groups'],
|
||||
roles=account_details['roles']
|
||||
)
|
||||
|
||||
|
||||
ATTACH_ROLE_POLICY_TEMPLATE = """<AttachRolePolicyResponse>
|
||||
<ResponseMetadata>
|
||||
@ -1309,3 +1321,144 @@ DELETE_ACCOUNT_ALIAS_TEMPLATE = """<DeleteAccountAliasResponse xmlns="https://ia
|
||||
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
|
||||
</ResponseMetadata>
|
||||
</DeleteAccountAliasResponse>"""
|
||||
|
||||
|
||||
LIST_GROUPS_FOR_USER_TEMPLATE = """<ListGroupsForUserResponse>
|
||||
<ListGroupsForUserResult>
|
||||
<Groups>
|
||||
{% for group in groups %}
|
||||
<member>
|
||||
<Path>{{ group.path }}</Path>
|
||||
<GroupName>{{ group.name }}</GroupName>
|
||||
<GroupId>{{ group.id }}</GroupId>
|
||||
<Arn>{{ group.arn }}</Arn>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</Groups>
|
||||
<IsTruncated>false</IsTruncated>
|
||||
</ListGroupsForUserResult>
|
||||
<ResponseMetadata>
|
||||
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
|
||||
</ResponseMetadata>
|
||||
</ListGroupsForUserResponse>"""
|
||||
|
||||
|
||||
GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||
<GetAccountAuthorizationDetailsResult>
|
||||
<IsTruncated>false</IsTruncated>
|
||||
<UserDetailList>
|
||||
{% for user in users %}
|
||||
<member>
|
||||
<GroupList />
|
||||
<AttachedManagedPolicies/>
|
||||
<UserId>{{ user.id }}</UserId>
|
||||
<Path>{{ user.path }}</Path>
|
||||
<UserName>{{ user.name }}</UserName>
|
||||
<Arn>{{ user.arn }}</Arn>
|
||||
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</UserDetailList>
|
||||
<Marker>
|
||||
EXAMPLEkakv9BCuUNFDtxWSyfzetYwEx2ADc8dnzfvERF5S6YMvXKx41t6gCl/eeaCX3Jo94/
|
||||
bKqezEAg8TEVS99EKFLxm3jtbpl25FDWEXAMPLE
|
||||
</Marker>
|
||||
<GroupDetailList>
|
||||
{% for group in groups %}
|
||||
<member>
|
||||
<GroupId>{{ group.id }}</GroupId>
|
||||
<AttachedManagedPolicies>
|
||||
{% for policy in group.managed_policies %}
|
||||
<member>
|
||||
<PolicyName>{{ policy.name }}</PolicyName>
|
||||
<PolicyArn>{{ policy.arn }}</PolicyArn>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</AttachedManagedPolicies>
|
||||
<GroupName>{{ group.name }}</GroupName>
|
||||
<Path>{{ group.path }}</Path>
|
||||
<Arn>{{ group.arn }}</Arn>
|
||||
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
||||
<GroupPolicyList/>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</GroupDetailList>
|
||||
<RoleDetailList>
|
||||
{% for role in roles %}
|
||||
<member>
|
||||
<RolePolicyList/>
|
||||
<AttachedManagedPolicies>
|
||||
{% for policy in role.managed_policies %}
|
||||
<member>
|
||||
<PolicyName>{{ policy.name }}</PolicyName>
|
||||
<PolicyArn>{{ policy.arn }}</PolicyArn>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</AttachedManagedPolicies>
|
||||
<InstanceProfileList>
|
||||
{% for profile in instance_profiles %}
|
||||
<member>
|
||||
<Id>{{ profile.id }}</Id>
|
||||
<Roles>
|
||||
{% for role in profile.roles %}
|
||||
<member>
|
||||
<Path>{{ role.path }}</Path>
|
||||
<Arn>{{ role.arn }}</Arn>
|
||||
<RoleName>{{ role.name }}</RoleName>
|
||||
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
||||
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
|
||||
<RoleId>{{ role.id }}</RoleId>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</Roles>
|
||||
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
|
||||
<Path>{{ profile.path }}</Path>
|
||||
<Arn>{{ profile.arn }}</Arn>
|
||||
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</InstanceProfileList>
|
||||
<Path>{{ role.path }}</Path>
|
||||
<Arn>{{ role.arn }}</Arn>
|
||||
<RoleName>{{ role.name }}</RoleName>
|
||||
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
|
||||
<CreateDate>2014-07-30T17:09:20Z</CreateDate>
|
||||
<RoleId>{{ role.id }}</RoleId>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</RoleDetailList>
|
||||
<Policies>
|
||||
{% for policy in policies %}
|
||||
<member>
|
||||
<PolicyName>{{ policy.name }}</PolicyName>
|
||||
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
|
||||
<PolicyId>{{ policy.id }}</PolicyId>
|
||||
<Path>{{ policy.path }}</Path>
|
||||
<PolicyVersionList>
|
||||
<member>
|
||||
<Document>
|
||||
{"Version":"2012-10-17","Statement":{"Effect":"Allow",
|
||||
"Action":["iam:CreatePolicy","iam:CreatePolicyVersion",
|
||||
"iam:DeletePolicy","iam:DeletePolicyVersion","iam:GetPolicy",
|
||||
"iam:GetPolicyVersion","iam:ListPolicies",
|
||||
"iam:ListPolicyVersions","iam:SetDefaultPolicyVersion"],
|
||||
"Resource":"*"}}
|
||||
</Document>
|
||||
<IsDefaultVersion>true</IsDefaultVersion>
|
||||
<VersionId>v1</VersionId>
|
||||
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
||||
</member>
|
||||
</PolicyVersionList>
|
||||
<Arn>{{ policy.arn }}</Arn>
|
||||
<AttachmentCount>1</AttachmentCount>
|
||||
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
|
||||
<IsAttachable>true</IsAttachable>
|
||||
<UpdateDate>2012-05-09T16:27:11Z</UpdateDate>
|
||||
</member>
|
||||
{% endfor %}
|
||||
</Policies>
|
||||
</GetAccountAuthorizationDetailsResult>
|
||||
<ResponseMetadata>
|
||||
<RequestId>92e79ae7-7399-11e4-8c85-4b53eEXAMPLE</RequestId>
|
||||
</ResponseMetadata>
|
||||
</GetAccountAuthorizationDetailsResponse>"""
|
||||
|
@ -19,19 +19,20 @@ from .utils import compose_shard_iterator, compose_new_shard_iterator, decompose
|
||||
|
||||
|
||||
class Record(BaseModel):
|
||||
|
||||
def __init__(self, partition_key, data, sequence_number, explicit_hash_key):
|
||||
self.partition_key = partition_key
|
||||
self.data = data
|
||||
self.sequence_number = sequence_number
|
||||
self.explicit_hash_key = explicit_hash_key
|
||||
self.create_at = unix_time()
|
||||
self.created_at_datetime = datetime.datetime.utcnow()
|
||||
self.created_at = unix_time(self.created_at_datetime)
|
||||
|
||||
def to_json(self):
|
||||
return {
|
||||
"Data": self.data,
|
||||
"PartitionKey": self.partition_key,
|
||||
"SequenceNumber": str(self.sequence_number),
|
||||
"ApproximateArrivalTimestamp": self.created_at_datetime.isoformat()
|
||||
}
|
||||
|
||||
|
||||
@ -50,16 +51,21 @@ class Shard(BaseModel):
|
||||
def get_records(self, last_sequence_id, limit):
|
||||
last_sequence_id = int(last_sequence_id)
|
||||
results = []
|
||||
secs_behind_latest = 0
|
||||
|
||||
for sequence_number, record in self.records.items():
|
||||
if sequence_number > last_sequence_id:
|
||||
results.append(record)
|
||||
last_sequence_id = sequence_number
|
||||
|
||||
very_last_record = self.records[next(reversed(self.records))]
|
||||
secs_behind_latest = very_last_record.created_at - record.created_at
|
||||
|
||||
if len(results) == limit:
|
||||
break
|
||||
|
||||
return results, last_sequence_id
|
||||
millis_behind_latest = int(secs_behind_latest * 1000)
|
||||
return results, last_sequence_id, millis_behind_latest
|
||||
|
||||
def put_record(self, partition_key, data, explicit_hash_key):
|
||||
# Note: this function is not safe for concurrency
|
||||
@ -83,12 +89,12 @@ class Shard(BaseModel):
|
||||
return 0
|
||||
|
||||
def get_sequence_number_at(self, at_timestamp):
|
||||
if not self.records or at_timestamp < list(self.records.values())[0].create_at:
|
||||
if not self.records or at_timestamp < list(self.records.values())[0].created_at:
|
||||
return 0
|
||||
else:
|
||||
# find the last item in the list that was created before
|
||||
# at_timestamp
|
||||
r = next((r for r in reversed(self.records.values()) if r.create_at < at_timestamp), None)
|
||||
r = next((r for r in reversed(self.records.values()) if r.created_at < at_timestamp), None)
|
||||
return r.sequence_number
|
||||
|
||||
def to_json(self):
|
||||
@ -226,7 +232,7 @@ class DeliveryStream(BaseModel):
|
||||
|
||||
self.records = []
|
||||
self.status = 'ACTIVE'
|
||||
self.create_at = datetime.datetime.utcnow()
|
||||
self.created_at = datetime.datetime.utcnow()
|
||||
self.last_updated = datetime.datetime.utcnow()
|
||||
|
||||
@property
|
||||
@ -267,7 +273,7 @@ class DeliveryStream(BaseModel):
|
||||
def to_dict(self):
|
||||
return {
|
||||
"DeliveryStreamDescription": {
|
||||
"CreateTimestamp": time.mktime(self.create_at.timetuple()),
|
||||
"CreateTimestamp": time.mktime(self.created_at.timetuple()),
|
||||
"DeliveryStreamARN": self.arn,
|
||||
"DeliveryStreamName": self.name,
|
||||
"DeliveryStreamStatus": self.status,
|
||||
@ -329,12 +335,12 @@ class KinesisBackend(BaseBackend):
|
||||
stream = self.describe_stream(stream_name)
|
||||
shard = stream.get_shard(shard_id)
|
||||
|
||||
records, last_sequence_id = shard.get_records(last_sequence_id, limit)
|
||||
records, last_sequence_id, millis_behind_latest = shard.get_records(last_sequence_id, limit)
|
||||
|
||||
next_shard_iterator = compose_shard_iterator(
|
||||
stream_name, shard, last_sequence_id)
|
||||
|
||||
return next_shard_iterator, records
|
||||
return next_shard_iterator, records, millis_behind_latest
|
||||
|
||||
def put_record(self, stream_name, partition_key, explicit_hash_key, sequence_number_for_ordering, data):
|
||||
stream = self.describe_stream(stream_name)
|
||||
|
@ -80,12 +80,13 @@ class KinesisResponse(BaseResponse):
|
||||
shard_iterator = self.parameters.get("ShardIterator")
|
||||
limit = self.parameters.get("Limit")
|
||||
|
||||
next_shard_iterator, records = self.kinesis_backend.get_records(
|
||||
next_shard_iterator, records, millis_behind_latest = self.kinesis_backend.get_records(
|
||||
shard_iterator, limit)
|
||||
|
||||
return json.dumps({
|
||||
"NextShardIterator": next_shard_iterator,
|
||||
"Records": [record.to_json() for record in records]
|
||||
"Records": [record.to_json() for record in records],
|
||||
'MillisBehindLatest': millis_behind_latest
|
||||
})
|
||||
|
||||
def put_record(self):
|
||||
|
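GetRecords responses now include MillisBehindLatest, derived from the per-record creation timestamps added in the models change above. A rough sketch of reading it through boto3 — the stream name and payload are illustrative:

```python
import boto3
from moto import mock_kinesis


@mock_kinesis
def read_millis_behind_latest():
    client = boto3.client("kinesis", region_name="us-east-1")
    client.create_stream(StreamName="example-stream", ShardCount=1)
    client.put_record(StreamName="example-stream", Data=b"payload", PartitionKey="pk")

    shard_id = client.describe_stream(StreamName="example-stream")[
        "StreamDescription"]["Shards"][0]["ShardId"]
    iterator = client.get_shard_iterator(
        StreamName="example-stream", ShardId=shard_id,
        ShardIteratorType="TRIM_HORIZON")["ShardIterator"]

    response = client.get_records(ShardIterator=iterator)
    print(len(response["Records"]), response["MillisBehindLatest"])


read_millis_behind_latest()
```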
@@ -58,6 +58,12 @@ class Key(BaseModel):

        return key

    def get_cfn_attribute(self, attribute_name):
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'Arn':
            return self.arn
        raise UnformattedGetAttTemplateException()


class KmsBackend(BaseBackend):
@ -175,11 +175,14 @@ class FakeMultipart(BaseModel):
|
||||
count = 0
|
||||
for pn, etag in body:
|
||||
part = self.parts.get(pn)
|
||||
if part is None or part.etag != etag:
|
||||
part_etag = None
|
||||
if part is not None:
|
||||
part_etag = part.etag.replace('"', '')
|
||||
etag = etag.replace('"', '')
|
||||
if part is None or part_etag != etag:
|
||||
raise InvalidPart()
|
||||
if last is not None and len(last.value) < UPLOAD_PART_MIN_SIZE:
|
||||
raise EntityTooSmall()
|
||||
part_etag = part.etag.replace('"', '')
|
||||
md5s.extend(decode_hex(part_etag)[0])
|
||||
total.extend(part.value)
|
||||
last = part
|
||||
@ -718,7 +721,7 @@ class S3Backend(BaseBackend):
|
||||
if key_name in bucket.keys:
|
||||
key = bucket.keys[key_name]
|
||||
else:
|
||||
for key_version in bucket.keys.getlist(key_name):
|
||||
for key_version in bucket.keys.getlist(key_name, default=[]):
|
||||
if str(key_version.version_id) == str(version_id):
|
||||
key = key_version
|
||||
break
|
||||
|
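The multipart fix above compares part ETags in their unquoted form, since clients may send the ETag either with or without surrounding quotes in CompleteMultipartUpload. A stand-alone illustration of the normalisation (not code from the change):

```python
def normalize_etag(etag):
    # Strip the surrounding quotes so '"abc123"' and 'abc123' compare equal.
    return etag.replace('"', '') if etag is not None else None


print(normalize_etag('"abc123"') == normalize_etag('abc123'))  # True
```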
moto/secretsmanager/exceptions.py (new file, 29 lines)

@@ -0,0 +1,29 @@
from __future__ import unicode_literals
|
||||
from moto.core.exceptions import JsonRESTError
|
||||
|
||||
|
||||
class SecretsManagerClientError(JsonRESTError):
|
||||
code = 400
|
||||
|
||||
|
||||
class ResourceNotFoundException(SecretsManagerClientError):
|
||||
def __init__(self):
|
||||
self.code = 404
|
||||
super(ResourceNotFoundException, self).__init__(
|
||||
"ResourceNotFoundException",
|
||||
"Secrets Manager can't find the specified secret"
|
||||
)
|
||||
|
||||
|
||||
class ClientError(SecretsManagerClientError):
|
||||
def __init__(self, message):
|
||||
super(ClientError, self).__init__(
|
||||
'InvalidParameterValue',
|
||||
message)
|
||||
|
||||
|
||||
class InvalidParameterException(SecretsManagerClientError):
|
||||
def __init__(self, message):
|
||||
super(InvalidParameterException, self).__init__(
|
||||
'InvalidParameterException',
|
||||
message)
|
@ -6,14 +6,22 @@ import json
|
||||
import boto3
|
||||
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from .exceptions import (
|
||||
ResourceNotFoundException,
|
||||
InvalidParameterException,
|
||||
ClientError
|
||||
)
|
||||
from .utils import random_password, secret_arn
|
||||
|
||||
|
||||
class SecretsManager(BaseModel):
|
||||
|
||||
def __init__(self, region_name, **kwargs):
|
||||
self.region = region_name
|
||||
self.secret_id = kwargs.get('secret_id', '')
|
||||
self.version_id = kwargs.get('version_id', '')
|
||||
self.version_stage = kwargs.get('version_stage', '')
|
||||
self.secret_string = ''
|
||||
|
||||
|
||||
class SecretsManagerBackend(BaseBackend):
|
||||
@ -22,15 +30,31 @@ class SecretsManagerBackend(BaseBackend):
|
||||
super(SecretsManagerBackend, self).__init__()
|
||||
self.region = region_name
|
||||
self.secret_id = kwargs.get('secret_id', '')
|
||||
self.name = kwargs.get('name', '')
|
||||
self.createdate = int(time.time())
|
||||
self.secret_string = ''
|
||||
self.rotation_enabled = False
|
||||
self.rotation_lambda_arn = ''
|
||||
self.auto_rotate_after_days = 0
|
||||
|
||||
def reset(self):
|
||||
region_name = self.region
|
||||
self.__dict__ = {}
|
||||
self.__init__(region_name)
|
||||
|
||||
def _is_valid_identifier(self, identifier):
|
||||
return identifier in (self.name, self.secret_id)
|
||||
|
||||
def get_secret_value(self, secret_id, version_id, version_stage):
|
||||
|
||||
if not self._is_valid_identifier(secret_id):
|
||||
raise ResourceNotFoundException()
|
||||
|
||||
response = json.dumps({
|
||||
"ARN": self.secret_arn(),
|
||||
"Name": self.secret_id,
|
||||
"ARN": secret_arn(self.region, self.secret_id),
|
||||
"Name": self.name,
|
||||
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
|
||||
"SecretString": "mysecretstring",
|
||||
"SecretString": self.secret_string,
|
||||
"VersionStages": [
|
||||
"AWSCURRENT",
|
||||
],
|
||||
@ -39,11 +63,80 @@ class SecretsManagerBackend(BaseBackend):
        return response

    def secret_arn(self):
        return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format(
            self.region, self.secret_id)
    def create_secret(self, name, secret_string, **kwargs):

        self.secret_string = secret_string
        self.secret_id = name
        self.name = name

        response = json.dumps({
            "ARN": secret_arn(self.region, name),
            "Name": self.name,
            "VersionId": "A435958A-D821-4193-B719-B7769357AER4",
        })

        return response

    def describe_secret(self, secret_id):
        if not self._is_valid_identifier(secret_id):
            raise ResourceNotFoundException

        response = json.dumps({
            "ARN": secret_arn(self.region, self.secret_id),
            "Name": self.name,
            "Description": "",
            "KmsKeyId": "",
            "RotationEnabled": self.rotation_enabled,
            "RotationLambdaARN": self.rotation_lambda_arn,
            "RotationRules": {
                "AutomaticallyAfterDays": self.auto_rotate_after_days
            },
            "LastRotatedDate": None,
            "LastChangedDate": None,
            "LastAccessedDate": None,
            "DeletedDate": None,
            "Tags": [
                {
                    "Key": "",
                    "Value": ""
                },
            ]
        })

        return response

    def get_random_password(self, password_length,
                            exclude_characters, exclude_numbers,
                            exclude_punctuation, exclude_uppercase,
                            exclude_lowercase, include_space,
                            require_each_included_type):
        # password size must have value less than or equal to 4096
        if password_length > 4096:
            raise ClientError(
                "ClientError: An error occurred (ValidationException) \
when calling the GetRandomPassword operation: 1 validation error detected: Value '{}' at 'passwordLength' \
failed to satisfy constraint: Member must have value less than or equal to 4096".format(password_length))
        if password_length < 4:
            raise InvalidParameterException(
                "InvalidParameterException: An error occurred (InvalidParameterException) \
when calling the GetRandomPassword operation: Password length is too short based on the required types.")

        response = json.dumps({
            "RandomPassword": random_password(password_length,
                                              exclude_characters,
                                              exclude_numbers,
                                              exclude_punctuation,
                                              exclude_uppercase,
                                              exclude_lowercase,
                                              include_space,
                                              require_each_included_type)
        })

        return response


available_regions = boto3.session.Session().get_available_regions("secretsmanager")
print(available_regions)
secretsmanager_backends = {region: SecretsManagerBackend(region_name=region) for region in available_regions}
available_regions = (
    boto3.session.Session().get_available_regions("secretsmanager")
)
secretsmanager_backends = {region: SecretsManagerBackend(region_name=region)
                           for region in available_regions}
@ -15,3 +15,38 @@ class SecretsManagerResponse(BaseResponse):
            secret_id=secret_id,
            version_id=version_id,
            version_stage=version_stage)

    def create_secret(self):
        name = self._get_param('Name')
        secret_string = self._get_param('SecretString')
        return secretsmanager_backends[self.region].create_secret(
            name=name,
            secret_string=secret_string
        )

    def get_random_password(self):
        password_length = self._get_param('PasswordLength', if_none=32)
        exclude_characters = self._get_param('ExcludeCharacters', if_none='')
        exclude_numbers = self._get_param('ExcludeNumbers', if_none=False)
        exclude_punctuation = self._get_param('ExcludePunctuation', if_none=False)
        exclude_uppercase = self._get_param('ExcludeUppercase', if_none=False)
        exclude_lowercase = self._get_param('ExcludeLowercase', if_none=False)
        include_space = self._get_param('IncludeSpace', if_none=False)
        require_each_included_type = self._get_param(
            'RequireEachIncludedType', if_none=True)
        return secretsmanager_backends[self.region].get_random_password(
            password_length=password_length,
            exclude_characters=exclude_characters,
            exclude_numbers=exclude_numbers,
            exclude_punctuation=exclude_punctuation,
            exclude_uppercase=exclude_uppercase,
            exclude_lowercase=exclude_lowercase,
            include_space=include_space,
            require_each_included_type=require_each_included_type
        )

    def describe_secret(self):
        secret_id = self._get_param('SecretId')
        return secretsmanager_backends[self.region].describe_secret(
            secret_id=secret_id
        )
72
moto/secretsmanager/utils.py
Normal file
@ -0,0 +1,72 @@
from __future__ import unicode_literals

import random
import string
import six
import re


def random_password(password_length, exclude_characters, exclude_numbers,
                    exclude_punctuation, exclude_uppercase, exclude_lowercase,
                    include_space, require_each_included_type):

    password = ''
    required_characters = ''

    if not exclude_lowercase and not exclude_uppercase:
        password += string.ascii_letters
        required_characters += random.choice(_exclude_characters(
            string.ascii_lowercase, exclude_characters))
        required_characters += random.choice(_exclude_characters(
            string.ascii_uppercase, exclude_characters))
    elif not exclude_lowercase:
        password += string.ascii_lowercase
        required_characters += random.choice(_exclude_characters(
            string.ascii_lowercase, exclude_characters))
    elif not exclude_uppercase:
        password += string.ascii_uppercase
        required_characters += random.choice(_exclude_characters(
            string.ascii_uppercase, exclude_characters))
    if not exclude_numbers:
        password += string.digits
        required_characters += random.choice(_exclude_characters(
            string.digits, exclude_characters))
    if not exclude_punctuation:
        password += string.punctuation
        required_characters += random.choice(_exclude_characters(
            string.punctuation, exclude_characters))
    if include_space:
        password += " "
        required_characters += " "

    password = ''.join(
        six.text_type(random.choice(password))
        for x in range(password_length))

    if require_each_included_type:
        password = _add_password_require_each_included_type(
            password, required_characters)

    password = _exclude_characters(password, exclude_characters)
    return password


def secret_arn(region, secret_id):
    return "arn:aws:secretsmanager:{0}:1234567890:secret:{1}-rIjad".format(
        region, secret_id)


def _exclude_characters(password, exclude_characters):
    for c in exclude_characters:
        if c in string.punctuation:
            # Escape punctuation regex usage
            c = "\{0}".format(c)
        password = re.sub(c, '', str(password))
    return password


def _add_password_require_each_included_type(password, required_characters):
    password_with_required_char = password[:-len(required_characters)]
    password_with_required_char += required_characters

    return password_with_required_char
@ -13,14 +13,21 @@ RECIPIENT_LIMIT = 50

class Message(BaseModel):

    def __init__(self, message_id):
    def __init__(self, message_id, source, subject, body, destinations):
        self.id = message_id
        self.source = source
        self.subject = subject
        self.body = body
        self.destinations = destinations


class RawMessage(BaseModel):

    def __init__(self, message_id):
    def __init__(self, message_id, source, destinations, raw_data):
        self.id = message_id
        self.source = source
        self.destinations = destinations
        self.raw_data = raw_data


class SESQuota(BaseModel):
@ -79,7 +86,7 @@ class SESBackend(BaseBackend):
            )

        message_id = get_random_message_id()
        message = Message(message_id)
        message = Message(message_id, source, subject, body, destinations)
        self.sent_messages.append(message)
        self.sent_message_count += recipient_count
        return message
@ -116,7 +123,7 @@ class SESBackend(BaseBackend):

        self.sent_message_count += recipient_count
        message_id = get_random_message_id()
        message = RawMessage(message_id)
        message = RawMessage(message_id, source, destinations, raw_data)
        self.sent_messages.append(message)
        return message

@ -181,6 +181,7 @@ class SNSResponse(BaseResponse):
        topic_arn = self._get_param('TopicArn')
        endpoint = self._get_param('Endpoint')
        protocol = self._get_param('Protocol')
        attributes = self._get_attributes()

        if protocol == 'sms' and not is_e164(endpoint):
            return self._error(
@ -190,6 +191,10 @@ class SNSResponse(BaseResponse):

        subscription = self.backend.subscribe(topic_arn, endpoint, protocol)

        if attributes is not None:
            for attr_name, attr_value in attributes.items():
                self.backend.set_subscription_attributes(subscription.arn, attr_name, attr_value)

        if self.request_json:
            return json.dumps({
                "SubscribeResponse": {
@ -3,6 +3,7 @@ from __future__ import unicode_literals
from collections import defaultdict

from moto.core import BaseBackend, BaseModel
from moto.core.exceptions import RESTError
from moto.ec2 import ec2_backends

import datetime
@ -58,11 +59,86 @@ class Parameter(BaseModel):
        return r


MAX_TIMEOUT_SECONDS = 3600


class Command(BaseModel):
    def __init__(self, comment='', document_name='', timeout_seconds=MAX_TIMEOUT_SECONDS,
                 instance_ids=None, max_concurrency='', max_errors='',
                 notification_config=None, output_s3_bucket_name='',
                 output_s3_key_prefix='', output_s3_region='', parameters=None,
                 service_role_arn='', targets=None):

        if instance_ids is None:
            instance_ids = []

        if notification_config is None:
            notification_config = {}

        if parameters is None:
            parameters = {}

        if targets is None:
            targets = []

        self.error_count = 0
        self.completed_count = len(instance_ids)
        self.target_count = len(instance_ids)
        self.command_id = str(uuid.uuid4())
        self.status = 'Success'
        self.status_details = 'Details placeholder'

        now = datetime.datetime.now()
        self.requested_date_time = now.isoformat()
        expires_after = now + datetime.timedelta(0, timeout_seconds)
        self.expires_after = expires_after.isoformat()

        self.comment = comment
        self.document_name = document_name
        self.instance_ids = instance_ids
        self.max_concurrency = max_concurrency
        self.max_errors = max_errors
        self.notification_config = notification_config
        self.output_s3_bucket_name = output_s3_bucket_name
        self.output_s3_key_prefix = output_s3_key_prefix
        self.output_s3_region = output_s3_region
        self.parameters = parameters
        self.service_role_arn = service_role_arn
        self.targets = targets

    def response_object(self):
        r = {
            'CommandId': self.command_id,
            'Comment': self.comment,
            'CompletedCount': self.completed_count,
            'DocumentName': self.document_name,
            'ErrorCount': self.error_count,
            'ExpiresAfter': self.expires_after,
            'InstanceIds': self.instance_ids,
            'MaxConcurrency': self.max_concurrency,
            'MaxErrors': self.max_errors,
            'NotificationConfig': self.notification_config,
            'OutputS3Region': self.output_s3_region,
            'OutputS3BucketName': self.output_s3_bucket_name,
            'OutputS3KeyPrefix': self.output_s3_key_prefix,
            'Parameters': self.parameters,
            'RequestedDateTime': self.requested_date_time,
            'ServiceRole': self.service_role_arn,
            'Status': self.status,
            'StatusDetails': self.status_details,
            'TargetCount': self.target_count,
            'Targets': self.targets,
        }

        return r


class SimpleSystemManagerBackend(BaseBackend):

    def __init__(self):
        self._parameters = {}
        self._resource_tags = defaultdict(lambda: defaultdict(dict))
        self._commands = []

    def delete_parameter(self, name):
        try:
@ -100,7 +176,7 @@ class SimpleSystemManagerBackend(BaseBackend):
        # difference here.
        path = path.rstrip('/') + '/'
        for param in self._parameters:
            if not param.startswith(path):
            if path != '/' and not param.startswith(path):
                continue
            if '/' in param[len(path) + 1:] and not recursive:
                continue
@ -167,38 +243,61 @@ class SimpleSystemManagerBackend(BaseBackend):
        return self._resource_tags[resource_type][resource_id]

    def send_command(self, **kwargs):
        instances = kwargs.get('InstanceIds', [])
        now = datetime.datetime.now()
        expires_after = now + datetime.timedelta(0, int(kwargs.get('TimeoutSeconds', 3600)))
        command = Command(
            comment=kwargs.get('Comment', ''),
            document_name=kwargs.get('DocumentName'),
            timeout_seconds=kwargs.get('TimeoutSeconds', 3600),
            instance_ids=kwargs.get('InstanceIds', []),
            max_concurrency=kwargs.get('MaxConcurrency', '50'),
            max_errors=kwargs.get('MaxErrors', '0'),
            notification_config=kwargs.get('NotificationConfig', {
                'NotificationArn': 'string',
                'NotificationEvents': ['Success'],
                'NotificationType': 'Command'
            }),
            output_s3_bucket_name=kwargs.get('OutputS3BucketName', ''),
            output_s3_key_prefix=kwargs.get('OutputS3KeyPrefix', ''),
            output_s3_region=kwargs.get('OutputS3Region', ''),
            parameters=kwargs.get('Parameters', {}),
            service_role_arn=kwargs.get('ServiceRoleArn', ''),
            targets=kwargs.get('Targets', []))

        self._commands.append(command)
        return {
            'Command': {
                'CommandId': str(uuid.uuid4()),
                'DocumentName': kwargs['DocumentName'],
                'Comment': kwargs.get('Comment'),
                'ExpiresAfter': expires_after.isoformat(),
                'Parameters': kwargs['Parameters'],
                'InstanceIds': kwargs['InstanceIds'],
                'Targets': kwargs.get('targets'),
                'RequestedDateTime': now.isoformat(),
                'Status': 'Success',
                'StatusDetails': 'string',
                'OutputS3Region': kwargs.get('OutputS3Region'),
                'OutputS3BucketName': kwargs.get('OutputS3BucketName'),
                'OutputS3KeyPrefix': kwargs.get('OutputS3KeyPrefix'),
                'MaxConcurrency': 'string',
                'MaxErrors': 'string',
                'TargetCount': len(instances),
                'CompletedCount': len(instances),
                'ErrorCount': 0,
                'ServiceRole': kwargs.get('ServiceRoleArn'),
                'NotificationConfig': {
                    'NotificationArn': 'string',
                    'NotificationEvents': ['Success'],
                    'NotificationType': 'Command'
                }
            }
            'Command': command.response_object()
        }

    def list_commands(self, **kwargs):
        """
        https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_ListCommands.html
        """
        commands = self._commands

        command_id = kwargs.get('CommandId', None)
        if command_id:
            commands = [self.get_command_by_id(command_id)]
        instance_id = kwargs.get('InstanceId', None)
        if instance_id:
            commands = self.get_commands_by_instance_id(instance_id)

        return {
            'Commands': [command.response_object() for command in commands]
        }

    def get_command_by_id(self, id):
        command = next(
            (command for command in self._commands if command.command_id == id), None)

        if command is None:
            raise RESTError('InvalidCommandId', 'Invalid command id.')

        return command

    def get_commands_by_instance_id(self, instance_id):
        return [
            command for command in self._commands
            if instance_id in command.instance_ids]


ssm_backends = {}
for region, ec2_backend in ec2_backends.items():
@ -205,3 +205,8 @@ class SimpleSystemManagerResponse(BaseResponse):
        return json.dumps(
            self.ssm_backend.send_command(**self.request_params)
        )

    def list_commands(self):
        return json.dumps(
            self.ssm_backend.list_commands(**self.request_params)
        )

@ -1,7 +1,7 @@
import boto3
import json

# Taken from free tear list when creating an instance
# Taken from free tier list when creating an instance
instances = [
    'ami-760aaa0f', 'ami-bb9a6bc2', 'ami-35e92e4c', 'ami-785db401', 'ami-b7e93bce', 'ami-dca37ea5', 'ami-999844e0',
    'ami-9b32e8e2', 'ami-f8e54081', 'ami-bceb39c5', 'ami-03cf127a', 'ami-1ecc1e67', 'ami-c2ff2dbb', 'ami-12c6146b',
@ -6,6 +6,9 @@ from botocore.session import Session
import boto3


script_dir = os.path.dirname(os.path.abspath(__file__))


def get_moto_implementation(service_name):
    service_name_standardized = service_name.replace("-", "") if "-" in service_name else service_name
    if not hasattr(moto, service_name_standardized):
@ -73,20 +76,22 @@ def write_implementation_coverage_to_file(coverage):
    except OSError:
        pass

    for service_name in sorted(coverage):
        implemented = coverage.get(service_name)['implemented']
        not_implemented = coverage.get(service_name)['not_implemented']
        operations = sorted(implemented + not_implemented)
    implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
    # rewrite the implementation coverage file with updated values
    print("Writing to {}".format(implementation_coverage_file))
    with open(implementation_coverage_file, "a+") as file:
        for service_name in sorted(coverage):
            implemented = coverage.get(service_name)['implemented']
            not_implemented = coverage.get(service_name)['not_implemented']
            operations = sorted(implemented + not_implemented)

        if implemented and not_implemented:
            percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
        elif implemented:
            percentage_implemented = 100
        else:
            percentage_implemented = 0
            if implemented and not_implemented:
                percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
            elif implemented:
                percentage_implemented = 100
            else:
                percentage_implemented = 0

    # rewrite the implementation coverage file with updated values
    with open("../IMPLEMENTATION_COVERAGE.md", "a+") as file:
            file.write("\n")
            file.write("## {} - {}% implemented\n".format(service_name, percentage_implemented))
            for op in operations:
4
setup.py
@ -9,7 +9,7 @@ install_requires = [
    "Jinja2>=2.7.3",
    "boto>=2.36.0",
    "boto3>=1.6.16",
    "botocore>=1.9.16",
    "botocore>=1.9.16,<1.11",
    "cookies",
    "cryptography>=2.0.0",
    "requests>=2.5",
@ -41,7 +41,7 @@ else:

setup(
    name='moto',
    version='1.3.3',
    version='1.3.5',
    description='A library that allows your python tests to easily'
                ' mock out the boto library',
    author='Steve Pulec',
@ -981,7 +981,7 @@ def test_api_keys():
|
||||
apikey['value'].should.equal(apikey_value)
|
||||
|
||||
apikey_name = 'TESTKEY2'
|
||||
payload = {'name': apikey_name, 'generateDistinctId': True}
|
||||
payload = {'name': apikey_name }
|
||||
response = client.create_api_key(**payload)
|
||||
apikey_id = response['id']
|
||||
apikey = client.get_api_key(apiKey=apikey_id)
|
||||
@ -995,3 +995,92 @@ def test_api_keys():
|
||||
|
||||
response = client.get_api_keys()
|
||||
len(response['items']).should.equal(1)
|
||||
|
||||
@mock_apigateway
|
||||
def test_usage_plans():
|
||||
region_name = 'us-west-2'
|
||||
client = boto3.client('apigateway', region_name=region_name)
|
||||
response = client.get_usage_plans()
|
||||
len(response['items']).should.equal(0)
|
||||
|
||||
usage_plan_name = 'TEST-PLAN'
|
||||
payload = {'name': usage_plan_name}
|
||||
response = client.create_usage_plan(**payload)
|
||||
usage_plan = client.get_usage_plan(usagePlanId=response['id'])
|
||||
usage_plan['name'].should.equal(usage_plan_name)
|
||||
usage_plan['apiStages'].should.equal([])
|
||||
|
||||
usage_plan_name = 'TEST-PLAN-2'
|
||||
usage_plan_description = 'Description'
|
||||
usage_plan_quota = {'limit': 10, 'period': 'DAY', 'offset': 0}
|
||||
usage_plan_throttle = {'rateLimit': 2, 'burstLimit': 1}
|
||||
usage_plan_api_stages = [{'apiId': 'foo', 'stage': 'bar'}]
|
||||
payload = {'name': usage_plan_name, 'description': usage_plan_description, 'quota': usage_plan_quota, 'throttle': usage_plan_throttle, 'apiStages': usage_plan_api_stages}
|
||||
response = client.create_usage_plan(**payload)
|
||||
usage_plan_id = response['id']
|
||||
usage_plan = client.get_usage_plan(usagePlanId=usage_plan_id)
|
||||
usage_plan['name'].should.equal(usage_plan_name)
|
||||
usage_plan['description'].should.equal(usage_plan_description)
|
||||
usage_plan['apiStages'].should.equal(usage_plan_api_stages)
|
||||
usage_plan['throttle'].should.equal(usage_plan_throttle)
|
||||
usage_plan['quota'].should.equal(usage_plan_quota)
|
||||
|
||||
response = client.get_usage_plans()
|
||||
len(response['items']).should.equal(2)
|
||||
|
||||
client.delete_usage_plan(usagePlanId=usage_plan_id)
|
||||
|
||||
response = client.get_usage_plans()
|
||||
len(response['items']).should.equal(1)
|
||||
|
||||
@mock_apigateway
|
||||
def test_usage_plan_keys():
|
||||
region_name = 'us-west-2'
|
||||
usage_plan_id = 'test_usage_plan_id'
|
||||
client = boto3.client('apigateway', region_name=region_name)
|
||||
usage_plan_id = "test"
|
||||
|
||||
# Create an API key so we can use it
|
||||
key_name = 'test-api-key'
|
||||
response = client.create_api_key(name=key_name)
|
||||
key_id = response["id"]
|
||||
key_value = response["value"]
|
||||
|
||||
# Get current plan keys (expect none)
|
||||
response = client.get_usage_plan_keys(usagePlanId=usage_plan_id)
|
||||
len(response['items']).should.equal(0)
|
||||
|
||||
# Create usage plan key
|
||||
key_type = 'API_KEY'
|
||||
payload = {'usagePlanId': usage_plan_id, 'keyId': key_id, 'keyType': key_type }
|
||||
response = client.create_usage_plan_key(**payload)
|
||||
usage_plan_key_id = response["id"]
|
||||
|
||||
# Get current plan keys (expect 1)
|
||||
response = client.get_usage_plan_keys(usagePlanId=usage_plan_id)
|
||||
len(response['items']).should.equal(1)
|
||||
|
||||
# Get a single usage plan key and check it matches the created one
|
||||
usage_plan_key = client.get_usage_plan_key(usagePlanId=usage_plan_id, keyId=usage_plan_key_id)
|
||||
usage_plan_key['name'].should.equal(key_name)
|
||||
usage_plan_key['id'].should.equal(key_id)
|
||||
usage_plan_key['type'].should.equal(key_type)
|
||||
usage_plan_key['value'].should.equal(key_value)
|
||||
|
||||
# Delete usage plan key
|
||||
client.delete_usage_plan_key(usagePlanId=usage_plan_id, keyId=key_id)
|
||||
|
||||
# Get current plan keys (expect none)
|
||||
response = client.get_usage_plan_keys(usagePlanId=usage_plan_id)
|
||||
len(response['items']).should.equal(0)
|
||||
|
||||
@mock_apigateway
|
||||
def test_create_usage_plan_key_non_existent_api_key():
|
||||
region_name = 'us-west-2'
|
||||
usage_plan_id = 'test_usage_plan_id'
|
||||
client = boto3.client('apigateway', region_name=region_name)
|
||||
usage_plan_id = "test"
|
||||
|
||||
# Attempt to create a usage plan key for a API key that doesn't exists
|
||||
payload = {'usagePlanId': usage_plan_id, 'keyId': 'non-existent', 'keyType': 'API_KEY' }
|
||||
client.create_usage_plan_key.when.called_with(**payload).should.throw(ClientError)
|
||||
|
@ -1,5 +1,6 @@
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
import json
|
||||
|
||||
import moto.server as server
|
||||
|
||||
@ -9,8 +10,82 @@ Test the different server responses
|
||||
|
||||
|
||||
def test_list_apis():
|
||||
backend = server.create_backend_app("apigateway")
|
||||
backend = server.create_backend_app('apigateway')
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get('/restapis')
|
||||
res.data.should.equal(b'{"item": []}')
|
||||
|
||||
def test_usage_plans_apis():
|
||||
backend = server.create_backend_app('apigateway')
|
||||
test_client = backend.test_client()
|
||||
|
||||
# List usage plans (expect empty)
|
||||
res = test_client.get('/usageplans')
|
||||
json.loads(res.data)["item"].should.have.length_of(0)
|
||||
|
||||
# Create usage plan
|
||||
res = test_client.post('/usageplans', data=json.dumps({'name': 'test'}))
|
||||
created_plan = json.loads(res.data)
|
||||
created_plan['name'].should.equal('test')
|
||||
|
||||
# List usage plans (expect 1 plan)
|
||||
res = test_client.get('/usageplans')
|
||||
json.loads(res.data)["item"].should.have.length_of(1)
|
||||
|
||||
# Get single usage plan
|
||||
res = test_client.get('/usageplans/{0}'.format(created_plan["id"]))
|
||||
fetched_plan = json.loads(res.data)
|
||||
fetched_plan.should.equal(created_plan)
|
||||
|
||||
# Delete usage plan
|
||||
res = test_client.delete('/usageplans/{0}'.format(created_plan["id"]))
|
||||
res.data.should.equal(b'{}')
|
||||
|
||||
# List usage plans (expect empty again)
|
||||
res = test_client.get('/usageplans')
|
||||
json.loads(res.data)["item"].should.have.length_of(0)
|
||||
|
||||
def test_usage_plans_keys():
|
||||
backend = server.create_backend_app('apigateway')
|
||||
test_client = backend.test_client()
|
||||
usage_plan_id = 'test_plan_id'
|
||||
|
||||
# Create API key to be used in tests
|
||||
res = test_client.post('/apikeys', data=json.dumps({'name': 'test'}))
|
||||
created_api_key = json.loads(res.data)
|
||||
|
||||
# List usage plans keys (expect empty)
|
||||
res = test_client.get('/usageplans/{0}/keys'.format(usage_plan_id))
|
||||
json.loads(res.data)["item"].should.have.length_of(0)
|
||||
|
||||
# Create usage plan key
|
||||
res = test_client.post('/usageplans/{0}/keys'.format(usage_plan_id), data=json.dumps({'keyId': created_api_key["id"], 'keyType': 'API_KEY'}))
|
||||
created_usage_plan_key = json.loads(res.data)
|
||||
|
||||
# List usage plans keys (expect 1 key)
|
||||
res = test_client.get('/usageplans/{0}/keys'.format(usage_plan_id))
|
||||
json.loads(res.data)["item"].should.have.length_of(1)
|
||||
|
||||
# Get single usage plan key
|
||||
res = test_client.get('/usageplans/{0}/keys/{1}'.format(usage_plan_id, created_api_key["id"]))
|
||||
fetched_plan_key = json.loads(res.data)
|
||||
fetched_plan_key.should.equal(created_usage_plan_key)
|
||||
|
||||
# Delete usage plan key
|
||||
res = test_client.delete('/usageplans/{0}/keys/{1}'.format(usage_plan_id, created_api_key["id"]))
|
||||
res.data.should.equal(b'{}')
|
||||
|
||||
# List usage plans keys (expect to be empty again)
|
||||
res = test_client.get('/usageplans/{0}/keys'.format(usage_plan_id))
|
||||
json.loads(res.data)["item"].should.have.length_of(0)
|
||||
|
||||
def test_create_usage_plans_key_non_existent_api_key():
|
||||
backend = server.create_backend_app('apigateway')
|
||||
test_client = backend.test_client()
|
||||
usage_plan_id = 'test_plan_id'
|
||||
|
||||
# Create usage plan key with non-existent api key
|
||||
res = test_client.post('/usageplans/{0}/keys'.format(usage_plan_id), data=json.dumps({'keyId': 'non-existent', 'keyType': 'API_KEY'}))
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
|
39
tests/test_cloudformation/fixtures/kms_key.py
Normal file
@ -0,0 +1,39 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
|
||||
"Description": "AWS CloudFormation Sample Template to create a KMS Key. The Fn::GetAtt is used to retrieve the ARN",
|
||||
|
||||
"Resources" : {
|
||||
"myKey" : {
|
||||
"Type" : "AWS::KMS::Key",
|
||||
"Properties" : {
|
||||
"Description": "Sample KmsKey",
|
||||
"EnableKeyRotation": False,
|
||||
"Enabled": True,
|
||||
"KeyPolicy" : {
|
||||
"Version": "2012-10-17",
|
||||
"Id": "key-default-1",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "Enable IAM User Permissions",
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": { "Fn::Join" : ["" , ["arn:aws:iam::", {"Ref" : "AWS::AccountId"} ,":root" ]] }
|
||||
},
|
||||
"Action": "kms:*",
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Outputs" : {
|
||||
"KeyArn" : {
|
||||
"Description": "Generated Key Arn",
|
||||
"Value" : { "Fn::GetAtt" : [ "myKey", "Arn" ] }
|
||||
}
|
||||
}
|
||||
}
|
@ -254,6 +254,21 @@ def test_parse_stack_with_get_attribute_outputs():
|
||||
output.should.be.a(Output)
|
||||
output.value.should.equal("my-queue")
|
||||
|
||||
def test_parse_stack_with_get_attribute_kms():
|
||||
from .fixtures.kms_key import template
|
||||
|
||||
template_json = json.dumps(template)
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('KeyArn')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
|
||||
def test_parse_stack_with_get_availability_zones():
|
||||
stack = FakeStack(
|
||||
|
@ -596,7 +596,50 @@ def test_boto3_conditions():
|
||||
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_put_item_conditions_fails():
|
||||
def test_boto3_put_item_conditions_pass():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.put_item(
|
||||
Item={'username': 'johndoe', 'foo': 'baz'},
|
||||
Expected={
|
||||
'foo': {
|
||||
'ComparisonOperator': 'EQ',
|
||||
'AttributeValueList': ['bar']
|
||||
}
|
||||
})
|
||||
final_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(final_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_put_item_conditions_pass_because_expect_not_exists_by_compare_to_null():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.put_item(
|
||||
Item={'username': 'johndoe', 'foo': 'baz'},
|
||||
Expected={
|
||||
'whatever': {
|
||||
'ComparisonOperator': 'NULL',
|
||||
}
|
||||
})
|
||||
final_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(final_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_put_item_conditions_pass_because_expect_exists_by_compare_to_not_null():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.put_item(
|
||||
Item={'username': 'johndoe', 'foo': 'baz'},
|
||||
Expected={
|
||||
'foo': {
|
||||
'ComparisonOperator': 'NOT_NULL',
|
||||
}
|
||||
})
|
||||
final_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(final_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_put_item_conditions_fail():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.put_item.when.called_with(
|
||||
@ -609,7 +652,7 @@ def test_boto3_put_item_conditions_fails():
|
||||
}).should.throw(botocore.client.ClientError)
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_fails():
|
||||
def test_boto3_update_item_conditions_fail():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'baz'})
|
||||
table.update_item.when.called_with(
|
||||
@ -622,7 +665,7 @@ def test_boto3_update_item_conditions_fails():
|
||||
}).should.throw(botocore.client.ClientError)
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_fails_because_expect_not_exists():
|
||||
def test_boto3_update_item_conditions_fail_because_expect_not_exists():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'baz'})
|
||||
table.update_item.when.called_with(
|
||||
@ -634,6 +677,19 @@ def test_boto3_update_item_conditions_fails_because_expect_not_exists():
|
||||
}
|
||||
}).should.throw(botocore.client.ClientError)
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_fail_because_expect_not_exists_by_compare_to_null():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'baz'})
|
||||
table.update_item.when.called_with(
|
||||
Key={'username': 'johndoe'},
|
||||
UpdateExpression='SET foo=bar',
|
||||
Expected={
|
||||
'foo': {
|
||||
'ComparisonOperator': 'NULL',
|
||||
}
|
||||
}).should.throw(botocore.client.ClientError)
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_pass():
|
||||
table = _create_user_table()
|
||||
@ -650,7 +706,7 @@ def test_boto3_update_item_conditions_pass():
|
||||
assert dict(returned_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_pass_because_expext_not_exists():
|
||||
def test_boto3_update_item_conditions_pass_because_expect_not_exists():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.update_item(
|
||||
@ -664,6 +720,36 @@ def test_boto3_update_item_conditions_pass_because_expext_not_exists():
|
||||
returned_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(returned_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_pass_because_expect_not_exists_by_compare_to_null():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.update_item(
|
||||
Key={'username': 'johndoe'},
|
||||
UpdateExpression='SET foo=baz',
|
||||
Expected={
|
||||
'whatever': {
|
||||
'ComparisonOperator': 'NULL',
|
||||
}
|
||||
})
|
||||
returned_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(returned_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_update_item_conditions_pass_because_expect_exists_by_compare_to_not_null():
|
||||
table = _create_user_table()
|
||||
table.put_item(Item={'username': 'johndoe', 'foo': 'bar'})
|
||||
table.update_item(
|
||||
Key={'username': 'johndoe'},
|
||||
UpdateExpression='SET foo=baz',
|
||||
Expected={
|
||||
'foo': {
|
||||
'ComparisonOperator': 'NOT_NULL',
|
||||
}
|
||||
})
|
||||
returned_item = table.get_item(Key={'username': 'johndoe'})
|
||||
assert dict(returned_item)['Item']['foo'].should.equal("baz")
|
||||
|
||||
@mock_dynamodb2
|
||||
def test_boto3_put_item_conditions_pass():
|
||||
table = _create_user_table()
|
||||
|
@ -199,7 +199,7 @@ def test_igw_desribe():
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_desribe_bad_id():
|
||||
def test_igw_describe_bad_id():
|
||||
""" internet gateway fail to fetch by bad id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
|
@ -289,9 +289,31 @@ def test_list_images():
|
||||
len(response['imageIds']).should.be(1)
|
||||
response['imageIds'][0]['imageTag'].should.equal('oldest')
|
||||
|
||||
response = client.list_images(repositoryName='test_repository_2', registryId='109876543210')
|
||||
type(response['imageIds']).should.be(list)
|
||||
len(response['imageIds']).should.be(0)
|
||||
|
||||
@mock_ecr
|
||||
def test_list_images_from_repository_that_doesnt_exist():
|
||||
client = boto3.client('ecr', region_name='us-east-1')
|
||||
_ = client.create_repository(
|
||||
repositoryName='test_repository_1'
|
||||
)
|
||||
|
||||
# non existing repo
|
||||
error_msg = re.compile(
|
||||
r".*The repository with name 'repo-that-doesnt-exist' does not exist in the registry with id '123'.*",
|
||||
re.MULTILINE)
|
||||
client.list_images.when.called_with(
|
||||
repositoryName='repo-that-doesnt-exist',
|
||||
registryId='123',
|
||||
).should.throw(Exception, error_msg)
|
||||
|
||||
# repo does not exist in specified registry
|
||||
error_msg = re.compile(
|
||||
r".*The repository with name 'test_repository_1' does not exist in the registry with id '222'.*",
|
||||
re.MULTILINE)
|
||||
client.list_images.when.called_with(
|
||||
repositoryName='test_repository_1',
|
||||
registryId='222',
|
||||
).should.throw(Exception, error_msg)
|
||||
|
||||
|
||||
@mock_ecr
|
||||
|
@ -2,6 +2,7 @@ from __future__ import unicode_literals
|
||||
|
||||
from copy import deepcopy
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
import boto3
|
||||
import sure # noqa
|
||||
import json
|
||||
@ -450,6 +451,21 @@ def test_update_service():
|
||||
response['service']['desiredCount'].should.equal(0)
|
||||
|
||||
|
||||
@mock_ecs
|
||||
def test_update_missing_service():
|
||||
client = boto3.client('ecs', region_name='us-east-1')
|
||||
_ = client.create_cluster(
|
||||
clusterName='test_ecs_cluster'
|
||||
)
|
||||
|
||||
client.update_service.when.called_with(
|
||||
cluster='test_ecs_cluster',
|
||||
service='test_ecs_service',
|
||||
taskDefinition='test_ecs_task',
|
||||
desiredCount=0
|
||||
).should.throw(ClientError)
|
||||
|
||||
|
||||
@mock_ecs
|
||||
def test_delete_service():
|
||||
client = boto3.client('ecs', region_name='us-east-1')
|
||||
@ -1054,6 +1070,13 @@ def test_describe_tasks():
|
||||
set([response['tasks'][0]['taskArn'], response['tasks']
|
||||
[1]['taskArn']]).should.equal(set(tasks_arns))
|
||||
|
||||
# Test we can pass task ids instead of ARNs
|
||||
response = client.describe_tasks(
|
||||
cluster='test_ecs_cluster',
|
||||
tasks=[tasks_arns[0].split("/")[-1]]
|
||||
)
|
||||
len(response['tasks']).should.equal(1)
|
||||
|
||||
|
||||
@mock_ecs
|
||||
def describe_task_definition():
|
||||
|
1
tests/test_glue/__init__.py
Normal file
@ -0,0 +1 @@
from __future__ import unicode_literals
1
tests/test_glue/fixtures/__init__.py
Normal file
@ -0,0 +1 @@
from __future__ import unicode_literals
31
tests/test_glue/fixtures/datacatalog.py
Normal file
@ -0,0 +1,31 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
TABLE_INPUT = {
|
||||
'Owner': 'a_fake_owner',
|
||||
'Parameters': {
|
||||
'EXTERNAL': 'TRUE',
|
||||
},
|
||||
'Retention': 0,
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {
|
||||
'serialization.format': '1'
|
||||
},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
|
||||
},
|
||||
'SkewedInfo': {
|
||||
'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []
|
||||
},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False
|
||||
},
|
||||
'TableType': 'EXTERNAL_TABLE',
|
||||
}
|
46
tests/test_glue/helpers.py
Normal file
@ -0,0 +1,46 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
from .fixtures.datacatalog import TABLE_INPUT
|
||||
|
||||
|
||||
def create_database(client, database_name):
|
||||
return client.create_database(
|
||||
DatabaseInput={
|
||||
'Name': database_name
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_database(client, database_name):
|
||||
return client.get_database(Name=database_name)
|
||||
|
||||
|
||||
def create_table_input(table_name, s3_location, columns=[], partition_keys=[]):
|
||||
table_input = copy.deepcopy(TABLE_INPUT)
|
||||
table_input['Name'] = table_name
|
||||
table_input['PartitionKeys'] = partition_keys
|
||||
table_input['StorageDescriptor']['Columns'] = columns
|
||||
table_input['StorageDescriptor']['Location'] = s3_location
|
||||
return table_input
|
||||
|
||||
|
||||
def create_table(client, database_name, table_name, table_input):
|
||||
return client.create_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input
|
||||
)
|
||||
|
||||
|
||||
def get_table(client, database_name, table_name):
|
||||
return client.get_table(
|
||||
DatabaseName=database_name,
|
||||
Name=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_tables(client, database_name):
|
||||
return client.get_tables(
|
||||
DatabaseName=database_name
|
||||
)
|
108
tests/test_glue/test_datacatalog.py
Normal file
@ -0,0 +1,108 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import boto3
|
||||
from botocore.client import ClientError
|
||||
|
||||
from moto import mock_glue
|
||||
from . import helpers
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_database():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
response = helpers.get_database(client, database_name)
|
||||
database = response['Database']
|
||||
|
||||
database.should.equal({'Name': database_name})
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_database_already_exists():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'cantcreatethisdatabasetwice'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('DatabaseAlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_table():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'myspecialtable'
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
response = helpers.get_table(client, database_name, table_name)
|
||||
table = response['Table']
|
||||
|
||||
table['Name'].should.equal(table_input['Name'])
|
||||
table['StorageDescriptor'].should.equal(table_input['StorageDescriptor'])
|
||||
table['PartitionKeys'].should.equal(table_input['PartitionKeys'])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_table_already_exists():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'cantcreatethistabletwice'
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('TableAlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_tables():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_names = ['myfirsttable', 'mysecondtable', 'mythirdtable']
|
||||
table_inputs = {}
|
||||
|
||||
for table_name in table_names:
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
table_inputs[table_name] = table_input
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
response = helpers.get_tables(client, database_name)
|
||||
|
||||
tables = response['TableList']
|
||||
|
||||
assert len(tables) == 3
|
||||
|
||||
for table in tables:
|
||||
table_name = table['Name']
|
||||
table_name.should.equal(table_inputs[table_name]['Name'])
|
||||
table['StorageDescriptor'].should.equal(table_inputs[table_name]['StorageDescriptor'])
|
||||
table['PartitionKeys'].should.equal(table_inputs[table_name]['PartitionKeys'])
|
@ -262,18 +262,27 @@ def test_update_assume_role_policy():
|
||||
role.assume_role_policy_document.should.equal("my-policy")
|
||||
|
||||
|
||||
@mock_iam
|
||||
def test_create_policy():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
response = conn.create_policy(
|
||||
PolicyName="TestCreatePolicy",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
response['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestCreatePolicy")
|
||||
|
||||
|
||||
@mock_iam
|
||||
def test_create_policy_versions():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
with assert_raises(ClientError):
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestCreatePolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
conn.create_policy(
|
||||
PolicyName="TestCreatePolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
version = conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestCreatePolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestCreatePolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'})
|
||||
|
||||
@ -285,14 +294,14 @@ def test_get_policy_version():
|
||||
PolicyName="TestGetPolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
version = conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestGetPolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
with assert_raises(ClientError):
|
||||
conn.get_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestGetPolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
|
||||
VersionId='v2-does-not-exist')
|
||||
retrieved = conn.get_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestGetPolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicyVersion",
|
||||
VersionId=version.get('PolicyVersion').get('VersionId'))
|
||||
retrieved.get('PolicyVersion').get('Document').should.equal({'some': 'policy'})
|
||||
|
||||
@ -302,18 +311,18 @@ def test_list_policy_versions():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
with assert_raises(ClientError):
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestListPolicyVersions")
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
|
||||
conn.create_policy(
|
||||
PolicyName="TestListPolicyVersions",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestListPolicyVersions",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
|
||||
PolicyDocument='{"first":"policy"}')
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestListPolicyVersions",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
|
||||
PolicyDocument='{"second":"policy"}')
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestListPolicyVersions")
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
|
||||
versions.get('Versions')[0].get('Document').should.equal({'first': 'policy'})
|
||||
versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'})
|
||||
|
||||
@ -325,17 +334,17 @@ def test_delete_policy_version():
|
||||
PolicyName="TestDeletePolicyVersion",
|
||||
PolicyDocument='{"some":"policy"}')
|
||||
conn.create_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestDeletePolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
|
||||
PolicyDocument='{"first":"policy"}')
|
||||
with assert_raises(ClientError):
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestDeletePolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
|
||||
VersionId='v2-nope-this-does-not-exist')
|
||||
conn.delete_policy_version(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestDeletePolicyVersion",
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
|
||||
VersionId='v1')
|
||||
versions = conn.list_policy_versions(
|
||||
PolicyArn="arn:aws:iam::aws:policy/TestDeletePolicyVersion")
|
||||
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion")
|
||||
len(versions.get('Versions')).should.equal(0)
|
||||
|
||||
|
||||
@ -669,3 +678,68 @@ def test_update_access_key():
|
||||
Status='Inactive')
|
||||
resp = client.list_access_keys(UserName=username)
|
||||
resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive')
|
||||
|
||||
|
||||
@mock_iam
|
||||
def test_get_account_authorization_details():
|
||||
import json
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
|
||||
conn.create_user(Path='/', UserName='testCloudAuxUser')
|
||||
conn.create_group(Path='/', GroupName='testCloudAuxGroup')
|
||||
conn.create_policy(
|
||||
PolicyName='testCloudAuxPolicy',
|
||||
Path='/',
|
||||
PolicyDocument=json.dumps({
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Action": "s3:ListBucket",
|
||||
"Resource": "*",
|
||||
"Effect": "Allow",
|
||||
}
|
||||
]
|
||||
}),
|
||||
Description='Test CloudAux Policy'
|
||||
)
|
||||
|
||||
result = conn.get_account_authorization_details(Filter=['Role'])
|
||||
len(result['RoleDetailList']) == 1
|
||||
len(result['UserDetailList']) == 0
|
||||
len(result['GroupDetailList']) == 0
|
||||
len(result['Policies']) == 0
|
||||
|
||||
result = conn.get_account_authorization_details(Filter=['User'])
|
||||
len(result['RoleDetailList']) == 0
|
||||
len(result['UserDetailList']) == 1
|
||||
len(result['GroupDetailList']) == 0
|
||||
len(result['Policies']) == 0
|
||||
|
||||
result = conn.get_account_authorization_details(Filter=['Group'])
|
||||
len(result['RoleDetailList']) == 0
|
||||
len(result['UserDetailList']) == 0
|
||||
len(result['GroupDetailList']) == 1
|
||||
len(result['Policies']) == 0
|
||||
|
||||
result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy'])
|
||||
len(result['RoleDetailList']) == 0
|
||||
len(result['UserDetailList']) == 0
|
||||
len(result['GroupDetailList']) == 0
|
||||
len(result['Policies']) == 1
|
||||
|
||||
# Check for greater than 1 since this should always be greater than one but might change.
|
||||
# See iam/aws_managed_policies.py
|
||||
result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy'])
|
||||
len(result['RoleDetailList']) == 0
|
||||
len(result['UserDetailList']) == 0
|
||||
len(result['GroupDetailList']) == 0
|
||||
len(result['Policies']) > 1
|
||||
|
||||
result = conn.get_account_authorization_details()
|
||||
len(result['RoleDetailList']) == 1
|
||||
len(result['UserDetailList']) == 1
|
||||
len(result['GroupDetailList']) == 1
|
||||
len(result['Policies']) > 1
|
||||
|
||||
|
||||
|
||||
|
@ -89,6 +89,7 @@ def test_basic_shard_iterator():
|
||||
response = conn.get_records(shard_iterator)
|
||||
shard_iterator = response['NextShardIterator']
|
||||
response['Records'].should.equal([])
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis_deprecated
|
||||
@ -225,6 +226,7 @@ def test_get_records_after_sequence_number():
|
||||
response = conn.get_records(shard_iterator)
|
||||
# And the first result returned should be the third item
|
||||
response['Records'][0]['Data'].should.equal('3')
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis_deprecated
|
||||
@ -262,6 +264,7 @@ def test_get_records_latest():
|
||||
response['Records'].should.have.length_of(1)
|
||||
response['Records'][0]['PartitionKey'].should.equal('last_record')
|
||||
response['Records'][0]['Data'].should.equal('last_record')
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
@ -305,6 +308,7 @@ def test_get_records_at_timestamp():
|
||||
response['Records'].should.have.length_of(len(keys))
|
||||
partition_keys = [r['PartitionKey'] for r in response['Records']]
|
||||
partition_keys.should.equal(keys)
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
@ -330,10 +334,69 @@ def test_get_records_at_very_old_timestamp():
|
||||
shard_iterator = response['ShardIterator']
|
||||
|
||||
response = conn.get_records(ShardIterator=shard_iterator)
|
||||
|
||||
response['Records'].should.have.length_of(len(keys))
|
||||
partition_keys = [r['PartitionKey'] for r in response['Records']]
|
||||
partition_keys.should.equal(keys)
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_get_records_timestamp_filtering():
|
||||
conn = boto3.client('kinesis', region_name="us-west-2")
|
||||
stream_name = "my_stream"
|
||||
conn.create_stream(StreamName=stream_name, ShardCount=1)
|
||||
|
||||
conn.put_record(StreamName=stream_name,
|
||||
Data='0',
|
||||
PartitionKey='0')
|
||||
|
||||
time.sleep(1.0)
|
||||
timestamp = datetime.datetime.utcnow()
|
||||
|
||||
conn.put_record(StreamName=stream_name,
|
||||
Data='1',
|
||||
PartitionKey='1')
|
||||
|
||||
response = conn.describe_stream(StreamName=stream_name)
|
||||
shard_id = response['StreamDescription']['Shards'][0]['ShardId']
|
||||
response = conn.get_shard_iterator(StreamName=stream_name,
|
||||
ShardId=shard_id,
|
||||
ShardIteratorType='AT_TIMESTAMP',
|
||||
Timestamp=timestamp)
|
||||
shard_iterator = response['ShardIterator']
|
||||
|
||||
response = conn.get_records(ShardIterator=shard_iterator)
|
||||
response['Records'].should.have.length_of(1)
|
||||
response['Records'][0]['PartitionKey'].should.equal('1')
|
||||
response['Records'][0]['ApproximateArrivalTimestamp'].should.be.\
|
||||
greater_than(timestamp)
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_get_records_millis_behind_latest():
|
||||
conn = boto3.client('kinesis', region_name="us-west-2")
|
||||
stream_name = "my_stream"
|
||||
conn.create_stream(StreamName=stream_name, ShardCount=1)
|
||||
|
||||
conn.put_record(StreamName=stream_name,
|
||||
Data='0',
|
||||
PartitionKey='0')
|
||||
time.sleep(1.0)
|
||||
conn.put_record(StreamName=stream_name,
|
||||
Data='1',
|
||||
PartitionKey='1')
|
||||
|
||||
response = conn.describe_stream(StreamName=stream_name)
|
||||
shard_id = response['StreamDescription']['Shards'][0]['ShardId']
|
||||
response = conn.get_shard_iterator(StreamName=stream_name,
|
||||
ShardId=shard_id,
|
||||
ShardIteratorType='TRIM_HORIZON')
|
||||
shard_iterator = response['ShardIterator']
|
||||
|
||||
response = conn.get_records(ShardIterator=shard_iterator, Limit=1)
|
||||
response['Records'].should.have.length_of(1)
|
||||
response['MillisBehindLatest'].should.be.greater_than(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
@ -363,6 +426,7 @@ def test_get_records_at_very_new_timestamp():
|
||||
response = conn.get_records(ShardIterator=shard_iterator)
|
||||
|
||||
response['Records'].should.have.length_of(0)
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
@ -385,6 +449,7 @@ def test_get_records_from_empty_stream_at_timestamp():
|
||||
response = conn.get_records(ShardIterator=shard_iterator)
|
||||
|
||||
response['Records'].should.have.length_of(0)
|
||||
response['MillisBehindLatest'].should.equal(0)
|
||||
|
||||
|
||||
@mock_kinesis_deprecated
|
||||
|
@ -225,6 +225,29 @@ def test_multipart_invalid_order():
    bucket.complete_multipart_upload.when.called_with(
        multipart.key_name, multipart.id, xml).should.throw(S3ResponseError)


@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_etag_quotes_stripped():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('mybucket')

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag
    # last part, can be less than 5 MB
    part2 = b'1'
    etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag
    # Strip quotes from etags
    etag1 = etag1.replace('"', '')
    etag2 = etag2.replace('"', '')
    xml = "<Part><PartNumber>{0}</PartNumber><ETag>{1}</ETag></Part>"
    xml = xml.format(1, etag1) + xml.format(2, etag2)
    xml = "<CompleteMultipartUpload>{0}</CompleteMultipartUpload>".format(xml)
    bucket.complete_multipart_upload.when.called_with(
        multipart.key_name, multipart.id, xml).should_not.throw(S3ResponseError)
    # we should get both parts as the key contents
    bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG)


@mock_s3_deprecated
@reduced_min_part_size
@ -2362,6 +2385,35 @@ def test_boto3_list_object_versions():
    response['Body'].read().should.equal(items[-1])


@mock_s3
def test_boto3_bad_prefix_list_object_versions():
    s3 = boto3.client('s3', region_name='us-east-1')
    bucket_name = 'mybucket'
    key = 'key-with-versions'
    bad_prefix = 'key-that-does-not-exist'
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name,
        VersioningConfiguration={
            'Status': 'Enabled'
        }
    )
    items = (six.b('v1'), six.b('v2'))
    for body in items:
        s3.put_object(
            Bucket=bucket_name,
            Key=key,
            Body=body
        )
    response = s3.list_object_versions(
        Bucket=bucket_name,
        Prefix=bad_prefix,
    )
    response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    response.should_not.contain('Versions')
    response.should_not.contain('DeleteMarkers')


@mock_s3
def test_boto3_delete_markers():
    s3 = boto3.client('s3', region_name='us-east-1')
@ -3,11 +3,179 @@ from __future__ import unicode_literals
import boto3

from moto import mock_secretsmanager
from botocore.exceptions import ClientError
import sure  # noqa
import string
import unittest
from nose.tools import assert_raises


@mock_secretsmanager
def test_get_secret_value():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    create_secret = conn.create_secret(Name='java-util-test-password',
                                       SecretString="foosecret")
    result = conn.get_secret_value(SecretId='java-util-test-password')
    assert result['SecretString'] == 'foosecret'


@mock_secretsmanager
def test_get_secret_that_does_not_exist():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    with assert_raises(ClientError):
        result = conn.get_secret_value(SecretId='i-dont-exist')


@mock_secretsmanager
def test_get_secret_with_mismatched_id():
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    create_secret = conn.create_secret(Name='java-util-test-password',
                                       SecretString="foosecret")

    with assert_raises(ClientError):
        result = conn.get_secret_value(SecretId='i-dont-exist')


@mock_secretsmanager
def test_create_secret():
    conn = boto3.client('secretsmanager', region_name='us-east-1')

    result = conn.create_secret(Name='test-secret', SecretString="foosecret")
    assert result['ARN'] == (
        'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
    assert result['Name'] == 'test-secret'
    secret = conn.get_secret_value(SecretId='test-secret')
    assert secret['SecretString'] == 'foosecret'


@mock_secretsmanager
def test_get_random_password_default_length():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password()
    assert len(random_password['RandomPassword']) == 32


@mock_secretsmanager
def test_get_random_password_default_requirements():
    # When require_each_included_type, default true
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password()
    # Should contain lowercase, uppercase, digit, special character
    assert any(c.islower() for c in random_password['RandomPassword'])
    assert any(c.isupper() for c in random_password['RandomPassword'])
    assert any(c.isdigit() for c in random_password['RandomPassword'])
    assert any(c in string.punctuation
               for c in random_password['RandomPassword'])


@mock_secretsmanager
def test_get_random_password_custom_length():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=50)
    assert len(random_password['RandomPassword']) == 50


@mock_secretsmanager
def test_get_random_exclude_lowercase():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=55,
                                               ExcludeLowercase=True)
    assert any(c.islower() for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_exclude_uppercase():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=55,
                                               ExcludeUppercase=True)
    assert any(c.isupper() for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_exclude_characters_and_symbols():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=20,
                                               ExcludeCharacters='xyzDje@?!.')
    assert any(c in 'xyzDje@?!.' for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_exclude_numbers():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=100,
                                               ExcludeNumbers=True)
    assert any(c.isdigit() for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_exclude_punctuation():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=100,
                                               ExcludePunctuation=True)
    assert any(c in string.punctuation
               for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_include_space_false():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=300)
    assert any(c.isspace() for c in random_password['RandomPassword']) == False


@mock_secretsmanager
def test_get_random_include_space_true():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=4,
                                               IncludeSpace=True)
    assert any(c.isspace() for c in random_password['RandomPassword']) == True


@mock_secretsmanager
def test_get_random_require_each_included_type():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    random_password = conn.get_random_password(PasswordLength=4,
                                               RequireEachIncludedType=True)
    assert any(c in string.punctuation for c in random_password['RandomPassword']) == True
    assert any(c in string.ascii_lowercase for c in random_password['RandomPassword']) == True
    assert any(c in string.ascii_uppercase for c in random_password['RandomPassword']) == True
    assert any(c in string.digits for c in random_password['RandomPassword']) == True


@mock_secretsmanager
def test_get_random_too_short_password():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    with assert_raises(ClientError):
        random_password = conn.get_random_password(PasswordLength=3)


@mock_secretsmanager
def test_get_random_too_long_password():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    with assert_raises(Exception):
        random_password = conn.get_random_password(PasswordLength=5555)


@mock_secretsmanager
def test_describe_secret():
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    conn.create_secret(Name='test-secret',
                       SecretString='foosecret')

    secret_description = conn.describe_secret(SecretId='test-secret')
    assert secret_description  # Returned dict is not empty
    assert secret_description['ARN'] == (
        'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad')


@mock_secretsmanager
def test_describe_secret_that_does_not_exist():
    conn = boto3.client('secretsmanager', region_name='us-west-2')

    with assert_raises(ClientError):
        result = conn.get_secret_value(SecretId='i-dont-exist')


@mock_secretsmanager
def test_describe_secret_that_does_not_match():
    conn = boto3.client('secretsmanager', region_name='us-west-2')
    conn.create_secret(Name='test-secret',
                       SecretString='foosecret')

    with assert_raises(ClientError):
        result = conn.get_secret_value(SecretId='i-dont-match')
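Taken together, the tests above pin down the Secrets Manager surface mocked here: create_secret, get_secret_value, describe_secret and get_random_password with its exclusion flags. A minimal round-trip sketch, assuming the mock_secretsmanager decorator (or a real account) and a hypothetical secret name 'db-password':

import boto3

client = boto3.client('secretsmanager', region_name='us-west-2')

# Generate a 40-character password without punctuation, then store it and read it back.
password = client.get_random_password(
    PasswordLength=40, ExcludePunctuation=True)['RandomPassword']
client.create_secret(Name='db-password', SecretString=password)  # hypothetical name
stored = client.get_secret_value(SecretId='db-password')['SecretString']
assert stored == password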
@ -7,7 +7,7 @@ import moto.server as server
from moto import mock_secretsmanager

'''
Test the different server responses for secretsmanager
'''


@ -17,11 +17,119 @@ def test_get_secret_value():
    backend = server.create_backend_app("secretsmanager")
    test_client = backend.test_client()

    create_secret = test_client.post('/',
                                     data={"Name": "test-secret",
                                           "SecretString": "foo-secret"},
                                     headers={
                                         "X-Amz-Target": "secretsmanager.CreateSecret"},
                                     )
    get_secret = test_client.post('/',
                                  data={"SecretId": "test-secret",
                                        "VersionStage": "AWSCURRENT"},
                                  headers={
                                      "X-Amz-Target": "secretsmanager.GetSecretValue"},
                                  )

    json_data = json.loads(get_secret.data.decode("utf-8"))
    assert json_data['SecretString'] == 'foo-secret'


@mock_secretsmanager
def test_get_secret_that_does_not_exist():

    backend = server.create_backend_app("secretsmanager")
    test_client = backend.test_client()

    get_secret = test_client.post('/',
                                  data={"SecretId": "i-dont-exist",
                                        "VersionStage": "AWSCURRENT"},
                                  headers={
                                      "X-Amz-Target": "secretsmanager.GetSecretValue"},
                                  )
    json_data = json.loads(get_secret.data.decode("utf-8"))
    assert json_data['message'] == "Secrets Manager can't find the specified secret"
    assert json_data['__type'] == 'ResourceNotFoundException'


@mock_secretsmanager
def test_create_secret():

    backend = server.create_backend_app("secretsmanager")
    test_client = backend.test_client()

    res = test_client.post('/',
                           data={"Name": "test-secret",
                                 "SecretString": "foo-secret"},
                           headers={
                               "X-Amz-Target": "secretsmanager.CreateSecret"},
                           )

    json_data = json.loads(res.data.decode("utf-8"))
    assert json_data['ARN'] == (
        'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
    assert json_data['Name'] == 'test-secret'


@mock_secretsmanager
def test_describe_secret():

    backend = server.create_backend_app('secretsmanager')
    test_client = backend.test_client()

    create_secret = test_client.post('/',
                                     data={"Name": "test-secret",
                                           "SecretString": "foosecret"},
                                     headers={
                                         "X-Amz-Target": "secretsmanager.CreateSecret"
                                     },
                                     )
    describe_secret = test_client.post('/',
                                       data={"SecretId": "test-secret"},
                                       headers={
                                           "X-Amz-Target": "secretsmanager.DescribeSecret"
                                       },
                                       )

    json_data = json.loads(describe_secret.data.decode("utf-8"))
    assert json_data  # Returned dict is not empty
    assert json_data['ARN'] == (
        'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
    )


@mock_secretsmanager
def test_describe_secret_that_does_not_exist():

    backend = server.create_backend_app('secretsmanager')
    test_client = backend.test_client()

    describe_secret = test_client.post('/',
                                       data={"SecretId": "i-dont-exist"},
                                       headers={
                                           "X-Amz-Target": "secretsmanager.DescribeSecret"
                                       },
                                       )

    json_data = json.loads(describe_secret.data.decode("utf-8"))
    assert json_data['message'] == "Secrets Manager can't find the specified secret"
    assert json_data['__type'] == 'ResourceNotFoundException'


@mock_secretsmanager
def test_describe_secret_that_does_not_match():

    backend = server.create_backend_app('secretsmanager')
    test_client = backend.test_client()

    create_secret = test_client.post('/',
                                     data={"Name": "test-secret",
                                           "SecretString": "foosecret"},
                                     headers={
                                         "X-Amz-Target": "secretsmanager.CreateSecret"
                                     },
                                     )
    describe_secret = test_client.post('/',
                                       data={"SecretId": "i-dont-match"},
                                       headers={
                                           "X-Amz-Target": "secretsmanager.DescribeSecret"
                                       },
                                       )

    json_data = json.loads(describe_secret.data.decode("utf-8"))
    assert json_data['message'] == "Secrets Manager can't find the specified secret"
    assert json_data['__type'] == 'ResourceNotFoundException'
@ -182,6 +182,72 @@ def test_subscription_paging():
    topic1_subscriptions.shouldnt.have("NextToken")


@mock_sns
def test_creating_subscription_with_attributes():
    conn = boto3.client('sns', region_name='us-east-1')
    conn.create_topic(Name="some-topic")
    response = conn.list_topics()
    topic_arn = response["Topics"][0]['TopicArn']

    delivery_policy = json.dumps({
        'healthyRetryPolicy': {
            "numRetries": 10,
            "minDelayTarget": 1,
            "maxDelayTarget": 2
        }
    })

    filter_policy = json.dumps({
        "store": ["example_corp"],
        "event": ["order_cancelled"],
        "encrypted": [False],
        "customer_interests": ["basketball", "baseball"]
    })

    conn.subscribe(TopicArn=topic_arn,
                   Protocol="http",
                   Endpoint="http://example.com/",
                   Attributes={
                       'RawMessageDelivery': 'true',
                       'DeliveryPolicy': delivery_policy,
                       'FilterPolicy': filter_policy
                   })

    subscriptions = conn.list_subscriptions()["Subscriptions"]
    subscriptions.should.have.length_of(1)
    subscription = subscriptions[0]
    subscription["TopicArn"].should.equal(topic_arn)
    subscription["Protocol"].should.equal("http")
    subscription["SubscriptionArn"].should.contain(topic_arn)
    subscription["Endpoint"].should.equal("http://example.com/")

    # Test the subscription attributes have been set
    subscription_arn = subscription["SubscriptionArn"]
    attrs = conn.get_subscription_attributes(
        SubscriptionArn=subscription_arn
    )

    attrs['Attributes']['RawMessageDelivery'].should.equal('true')
    attrs['Attributes']['DeliveryPolicy'].should.equal(delivery_policy)
    attrs['Attributes']['FilterPolicy'].should.equal(filter_policy)

    # Now unsubscribe the subscription
    conn.unsubscribe(SubscriptionArn=subscription["SubscriptionArn"])

    # And there should be zero subscriptions left
    subscriptions = conn.list_subscriptions()["Subscriptions"]
    subscriptions.should.have.length_of(0)

    # invalid attr name
    with assert_raises(ClientError):
        conn.subscribe(TopicArn=topic_arn,
                       Protocol="http",
                       Endpoint="http://example.com/",
                       Attributes={
                           'InvalidName': 'true'
                       })


@mock_sns
def test_set_subscription_attributes():
    conn = boto3.client('sns', region_name='us-east-1')
@ -4,6 +4,10 @@ import boto3
import botocore.exceptions
import sure  # noqa
import datetime
import uuid

from botocore.exceptions import ClientError
from nose.tools import assert_raises

from moto import mock_ssm

@ -95,6 +99,27 @@ def test_get_parameters_by_path():
        Type='SecureString',
        KeyId='alias/aws/ssm')

    client.put_parameter(
        Name='foo',
        Description='A test parameter',
        Value='bar',
        Type='String')

    client.put_parameter(
        Name='baz',
        Description='A test parameter',
        Value='qux',
        Type='String')

    response = client.get_parameters_by_path(Path='/', Recursive=False)
    len(response['Parameters']).should.equal(2)
    {p['Value'] for p in response['Parameters']}.should.equal(
        set(['bar', 'qux'])
    )

    response = client.get_parameters_by_path(Path='/', Recursive=True)
    len(response['Parameters']).should.equal(9)

    response = client.get_parameters_by_path(Path='/foo')
    len(response['Parameters']).should.equal(2)
    {p['Value'] for p in response['Parameters']}.should.equal(

@ -417,6 +442,7 @@ def test_describe_parameters_filter_keyid():
    response['Parameters'][0]['Type'].should.equal('SecureString')
    ''.should.equal(response.get('NextToken', ''))


@mock_ssm
def test_describe_parameters_attributes():
    client = boto3.client('ssm', region_name='us-east-1')

@ -445,6 +471,7 @@ def test_describe_parameters_attributes():
    response['Parameters'][1].get('Description').should.be.none
    response['Parameters'][1]['Version'].should.equal(1)


@mock_ssm
def test_get_parameter_invalid():
    client = boto3.client('ssm', region_name='us-east-1')

@ -585,3 +612,59 @@ def test_send_command():
    cmd['OutputS3KeyPrefix'].should.equal('pref')

    cmd['ExpiresAfter'].should.be.greater_than(before)

    # test sending a command without any optional parameters
    response = client.send_command(
        DocumentName=ssm_document)

    cmd = response['Command']

    cmd['CommandId'].should_not.be(None)
    cmd['DocumentName'].should.equal(ssm_document)


@mock_ssm
def test_list_commands():
    client = boto3.client('ssm', region_name='us-east-1')

    ssm_document = 'AWS-RunShellScript'
    params = {'commands': ['#!/bin/bash\necho \'hello world\'']}

    response = client.send_command(
        InstanceIds=['i-123456'],
        DocumentName=ssm_document,
        Parameters=params,
        OutputS3Region='us-east-2',
        OutputS3BucketName='the-bucket',
        OutputS3KeyPrefix='pref')

    cmd = response['Command']
    cmd_id = cmd['CommandId']

    # get the command by id
    response = client.list_commands(
        CommandId=cmd_id)

    cmds = response['Commands']
    len(cmds).should.equal(1)
    cmds[0]['CommandId'].should.equal(cmd_id)

    # add another command with the same instance id to test listing by
    # instance id
    client.send_command(
        InstanceIds=['i-123456'],
        DocumentName=ssm_document)

    response = client.list_commands(
        InstanceId='i-123456')

    cmds = response['Commands']
    len(cmds).should.equal(2)

    for cmd in cmds:
        cmd['InstanceIds'].should.contain('i-123456')

    # test the error case for an invalid command id
    with assert_raises(ClientError):
        response = client.list_commands(
            CommandId=str(uuid.uuid4()))
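The send_command/list_commands tests rely on filtering by CommandId and by InstanceId. A minimal sketch of that flow, assuming a hypothetical instance id 'i-123456' and the stock AWS-RunShellScript document:

import boto3

client = boto3.client('ssm', region_name='us-east-1')

# Send a shell command to one instance, then look it up both ways.
command_id = client.send_command(
    InstanceIds=['i-123456'],  # hypothetical instance id
    DocumentName='AWS-RunShellScript',
    Parameters={'commands': ['echo "hello world"']},
)['Command']['CommandId']

by_id = client.list_commands(CommandId=command_id)['Commands']
by_instance = client.list_commands(InstanceId='i-123456')['Commands']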