Merge pull request #14 from spulec/master

Merge upstream
This commit is contained in:
Bert Blommers 2019-11-07 15:36:46 +00:00 committed by GitHub
commit 0c3d7c4f04
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 2492 additions and 190 deletions

View File

@ -1,6 +1,5 @@
dist: xenial
language: python
sudo: false
services:
- docker
python:
@ -54,7 +53,7 @@ deploy:
on:
branch:
- master
skip_cleanup: true
cleanup: false
skip_existing: true
# - provider: pypi
# distributions: sdist bdist_wheel

View File

@ -2362,7 +2362,7 @@
- [ ] send_ssh_public_key
## ecr
30% implemented
27% implemented
- [ ] batch_check_layer_availability
- [X] batch_delete_image
- [X] batch_get_image
@ -2371,6 +2371,7 @@
- [ ] delete_lifecycle_policy
- [X] delete_repository
- [ ] delete_repository_policy
- [ ] describe_image_scan_findings
- [X] describe_images
- [X] describe_repositories
- [ ] get_authorization_token
@ -2382,9 +2383,11 @@
- [X] list_images
- [ ] list_tags_for_resource
- [X] put_image
- [ ] put_image_scanning_configuration
- [ ] put_image_tag_mutability
- [ ] put_lifecycle_policy
- [ ] set_repository_policy
- [ ] start_image_scan
- [ ] start_lifecycle_policy_preview
- [ ] tag_resource
- [ ] untag_resource
@ -2475,6 +2478,7 @@
- [ ] authorize_cache_security_group_ingress
- [ ] batch_apply_update_action
- [ ] batch_stop_update_action
- [ ] complete_migration
- [ ] copy_snapshot
- [ ] create_cache_cluster
- [ ] create_cache_parameter_group
@ -2516,6 +2520,7 @@
- [ ] remove_tags_from_resource
- [ ] reset_cache_parameter_group
- [ ] revoke_cache_security_group_ingress
- [ ] start_migration
- [ ] test_failover
## elasticbeanstalk
@ -3262,7 +3267,7 @@
- [ ] describe_events
## iam
60% implemented
62% implemented
- [ ] add_client_id_to_open_id_connect_provider
- [X] add_role_to_instance_profile
- [X] add_user_to_group
@ -3287,7 +3292,7 @@
- [X] deactivate_mfa_device
- [X] delete_access_key
- [X] delete_account_alias
- [ ] delete_account_password_policy
- [X] delete_account_password_policy
- [ ] delete_group
- [ ] delete_group_policy
- [ ] delete_instance_profile
@ -3317,7 +3322,7 @@
- [ ] generate_service_last_accessed_details
- [X] get_access_key_last_used
- [X] get_account_authorization_details
- [ ] get_account_password_policy
- [X] get_account_password_policy
- [ ] get_account_summary
- [ ] get_context_keys_for_custom_policy
- [ ] get_context_keys_for_principal_policy
@ -3387,7 +3392,7 @@
- [X] untag_role
- [ ] untag_user
- [X] update_access_key
- [ ] update_account_password_policy
- [X] update_account_password_policy
- [ ] update_assume_role_policy
- [ ] update_group
- [X] update_login_profile

View File

@ -30,6 +30,8 @@ Currently implemented Services:
+-----------------------+---------------------+-----------------------------------+
| Data Pipeline | @mock_datapipeline | basic endpoints done |
+-----------------------+---------------------+-----------------------------------+
| DataSync | @mock_datasync | some endpoints done |
+-----------------------+---------------------+-----------------------------------+
| - DynamoDB | - @mock_dynamodb | - core endpoints done |
| - DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes|
+-----------------------+---------------------+-----------------------------------+

View File

@ -1,25 +1,21 @@
from __future__ import unicode_literals
# import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = "moto"
__version__ = "1.3.14.dev"
from .acm import mock_acm # noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # noqa
from .athena import mock_athena # noqa
from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # noqa
from .batch import mock_batch # noqa
from .cloudformation import mock_cloudformation # noqa
from .cloudformation import mock_cloudformation_deprecated # noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # noqa
from .cognitoidentity import ( # noqa
mock_cognitoidentity,
mock_cognitoidentity_deprecated,
)
from .cognitoidentity import mock_cognitoidentity # noqa
from .cognitoidentity import mock_cognitoidentity_deprecated # noqa
from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # noqa
from .config import mock_config # noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # noqa
from .datapipeline import mock_datapipeline # noqa
from .datapipeline import mock_datapipeline_deprecated # noqa
from .datasync import mock_datasync # noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa
from .dynamodbstreams import mock_dynamodbstreams # noqa
@ -33,31 +29,36 @@ from .events import mock_events # noqa
from .glacier import mock_glacier, mock_glacier_deprecated # noqa
from .glue import mock_glue # noqa
from .iam import mock_iam, mock_iam_deprecated # noqa
from .iot import mock_iot # noqa
from .iotdata import mock_iotdata # noqa
from .kinesis import mock_kinesis, mock_kinesis_deprecated # noqa
from .kms import mock_kms, mock_kms_deprecated # noqa
from .organizations import mock_organizations # noqa
from .logs import mock_logs, mock_logs_deprecated # noqa
from .opsworks import mock_opsworks, mock_opsworks_deprecated # noqa
from .organizations import mock_organizations # noqa
from .polly import mock_polly # noqa
from .rds import mock_rds, mock_rds_deprecated # noqa
from .rds2 import mock_rds2, mock_rds2_deprecated # noqa
from .redshift import mock_redshift, mock_redshift_deprecated # noqa
from .resourcegroups import mock_resourcegroups # noqa
from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi # noqa
from .route53 import mock_route53, mock_route53_deprecated # noqa
from .s3 import mock_s3, mock_s3_deprecated # noqa
from .ses import mock_ses, mock_ses_deprecated # noqa
from .secretsmanager import mock_secretsmanager # noqa
from .ses import mock_ses, mock_ses_deprecated # noqa
from .sns import mock_sns, mock_sns_deprecated # noqa
from .sqs import mock_sqs, mock_sqs_deprecated # noqa
from .ssm import mock_ssm # noqa
from .stepfunctions import mock_stepfunctions # noqa
from .sts import mock_sts, mock_sts_deprecated # noqa
from .ssm import mock_ssm # noqa
from .route53 import mock_route53, mock_route53_deprecated # noqa
from .swf import mock_swf, mock_swf_deprecated # noqa
from .xray import mock_xray, mock_xray_client, XRaySegment # noqa
from .logs import mock_logs, mock_logs_deprecated # noqa
from .batch import mock_batch # noqa
from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi # noqa
from .iot import mock_iot # noqa
from .iotdata import mock_iotdata # noqa
from .xray import XRaySegment, mock_xray, mock_xray_client # noqa
# import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = "moto"
__version__ = "1.3.14.dev"
try:

View File

@ -2,6 +2,89 @@ from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class BadRequestException(RESTError):
    """Base class for API Gateway errors returned as HTTP 400 BadRequestException."""

    pass
class AwsProxyNotAllowed(BadRequestException):
    """400 error: AWS_PROXY integrations only support Lambda/Firehose targets."""

    def __init__(self):
        msg = (
            "Integrations of type 'AWS_PROXY' currently only supports "
            "Lambda function and Firehose stream invocations."
        )
        super(AwsProxyNotAllowed, self).__init__("BadRequestException", msg)
class CrossAccountNotAllowed(RESTError):
    """Access-denied error raised when a role from another account is passed."""

    def __init__(self):
        msg = "Cross-account pass role is not allowed."
        super(CrossAccountNotAllowed, self).__init__("AccessDeniedException", msg)
class RoleNotSpecified(BadRequestException):
    """400 error: an AWS integration was requested without a role ARN."""

    def __init__(self):
        msg = "Role ARN must be specified for AWS integrations"
        super(RoleNotSpecified, self).__init__("BadRequestException", msg)
class IntegrationMethodNotDefined(BadRequestException):
    """400 error: the integration's HTTP method was left empty."""

    def __init__(self):
        msg = "Enumeration value for HttpMethod must be non-empty"
        super(IntegrationMethodNotDefined, self).__init__("BadRequestException", msg)
class InvalidResourcePathException(BadRequestException):
    """400 error: a resource path part contains disallowed characters."""

    def __init__(self):
        msg = (
            "Resource's path part only allow a-zA-Z0-9._- and curly braces "
            "at the beginning and the end."
        )
        super(InvalidResourcePathException, self).__init__("BadRequestException", msg)
class InvalidHttpEndpoint(BadRequestException):
    """400 error: the HTTP/HTTP_PROXY integration URI is not a valid endpoint."""

    def __init__(self):
        msg = "Invalid HTTP endpoint specified for URI"
        super(InvalidHttpEndpoint, self).__init__("BadRequestException", msg)
class InvalidArn(BadRequestException):
    """400 error: the request contains a malformed ARN."""

    def __init__(self):
        msg = "Invalid ARN specified in the request"
        super(InvalidArn, self).__init__("BadRequestException", msg)
class InvalidIntegrationArn(BadRequestException):
    """400 error: an AWS integration ARN lacks the required path/action part."""

    def __init__(self):
        msg = "AWS ARN for integration must contain path or action"
        super(InvalidIntegrationArn, self).__init__("BadRequestException", msg)
class InvalidRequestInput(BadRequestException):
    """400 error: generic invalid request body/input."""

    def __init__(self):
        msg = "Invalid request input"
        super(InvalidRequestInput, self).__init__("BadRequestException", msg)
class NoIntegrationDefined(BadRequestException):
    """400 error: a method has no integration attached."""

    def __init__(self):
        msg = "No integration defined for method"
        super(NoIntegrationDefined, self).__init__("BadRequestException", msg)
class NoMethodDefined(BadRequestException):
    """400 error: the REST API has no methods at all."""

    def __init__(self):
        msg = "The REST API doesn't contain any methods"
        super(NoMethodDefined, self).__init__("BadRequestException", msg)
class StageNotFoundException(RESTError):
    """Raised when a referenced deployment stage does not exist (HTTP 404)."""

    code = 404

View File

@ -3,15 +3,36 @@ from __future__ import unicode_literals
import random
import string
import re
import requests
import time
from boto3.session import Session
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import responses
from moto.core import BaseBackend, BaseModel
from .utils import create_id
from moto.core.utils import path_url
from .exceptions import StageNotFoundException, ApiKeyNotFoundException
from moto.sts.models import ACCOUNT_ID
from .exceptions import (
ApiKeyNotFoundException,
AwsProxyNotAllowed,
CrossAccountNotAllowed,
IntegrationMethodNotDefined,
InvalidArn,
InvalidIntegrationArn,
InvalidHttpEndpoint,
InvalidResourcePathException,
InvalidRequestInput,
StageNotFoundException,
RoleNotSpecified,
NoIntegrationDefined,
NoMethodDefined,
)
STAGE_URL = "https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}"
@ -534,6 +555,8 @@ class APIGatewayBackend(BaseBackend):
return resource
def create_resource(self, function_id, parent_resource_id, path_part):
    """Create a child resource under *parent_resource_id* in the given API.

    Raises InvalidResourcePathException if *path_part* contains characters
    other than a-zA-Z0-9._- (optionally wrapped in curly braces).
    """
    if re.match("^\\{?[a-zA-Z0-9._-]+\\}?$", path_part) is None:
        raise InvalidResourcePathException()
    rest_api = self.get_rest_api(function_id)
    return rest_api.add_child(path=path_part, parent_id=parent_resource_id)
@ -594,6 +617,10 @@ class APIGatewayBackend(BaseBackend):
stage = api.stages[stage_name] = Stage()
return stage.apply_operations(patch_operations)
def delete_stage(self, function_id, stage_name):
    """Remove *stage_name* from the REST API identified by *function_id*."""
    rest_api = self.get_rest_api(function_id)
    rest_api.stages.pop(stage_name)
def get_method_response(self, function_id, resource_id, method_type, response_code):
method = self.get_method(function_id, resource_id, method_type)
method_response = method.get_response(response_code)
@ -620,9 +647,40 @@ class APIGatewayBackend(BaseBackend):
method_type,
integration_type,
uri,
integration_method=None,
credentials=None,
request_templates=None,
):
resource = self.get_resource(function_id, resource_id)
if credentials and not re.match(
"^arn:aws:iam::" + str(ACCOUNT_ID), credentials
):
raise CrossAccountNotAllowed()
if not integration_method and integration_type in [
"HTTP",
"HTTP_PROXY",
"AWS",
"AWS_PROXY",
]:
raise IntegrationMethodNotDefined()
if integration_type in ["AWS_PROXY"] and re.match(
"^arn:aws:apigateway:[a-zA-Z0-9-]+:s3", uri
):
raise AwsProxyNotAllowed()
if (
integration_type in ["AWS"]
and re.match("^arn:aws:apigateway:[a-zA-Z0-9-]+:s3", uri)
and not credentials
):
raise RoleNotSpecified()
if integration_type in ["HTTP", "HTTP_PROXY"] and not self._uri_validator(uri):
raise InvalidHttpEndpoint()
if integration_type in ["AWS", "AWS_PROXY"] and not re.match("^arn:aws:", uri):
raise InvalidArn()
if integration_type in ["AWS", "AWS_PROXY"] and not re.match(
"^arn:aws:apigateway:[a-zA-Z0-9-]+:[a-zA-Z0-9-]+:(path|action)/", uri
):
raise InvalidIntegrationArn()
integration = resource.add_integration(
method_type, integration_type, uri, request_templates=request_templates
)
@ -637,8 +695,16 @@ class APIGatewayBackend(BaseBackend):
return resource.delete_integration(method_type)
def create_integration_response(
self, function_id, resource_id, method_type, status_code, selection_pattern
self,
function_id,
resource_id,
method_type,
status_code,
selection_pattern,
response_templates,
):
if response_templates is None:
raise InvalidRequestInput()
integration = self.get_integration(function_id, resource_id, method_type)
integration_response = integration.create_integration_response(
status_code, selection_pattern
@ -665,6 +731,18 @@ class APIGatewayBackend(BaseBackend):
if stage_variables is None:
stage_variables = {}
api = self.get_rest_api(function_id)
methods = [
list(res.resource_methods.values())
for res in self.list_resources(function_id)
][0]
if not any(methods):
raise NoMethodDefined()
method_integrations = [
method["methodIntegration"] if "methodIntegration" in method else None
for method in methods
]
if not any(method_integrations):
raise NoIntegrationDefined()
deployment = api.create_deployment(name, description, stage_variables)
return deployment
@ -753,6 +831,13 @@ class APIGatewayBackend(BaseBackend):
self.usage_plan_keys[usage_plan_id].pop(key_id)
return {}
def _uri_validator(self, uri):
try:
result = urlparse(uri)
return all([result.scheme, result.netloc, result.path])
except Exception:
return False
apigateway_backends = {}
for region_name in Session().get_available_regions("apigateway"):

View File

@ -4,12 +4,24 @@ import json
from moto.core.responses import BaseResponse
from .models import apigateway_backends
from .exceptions import StageNotFoundException, ApiKeyNotFoundException
from .exceptions import (
ApiKeyNotFoundException,
BadRequestException,
CrossAccountNotAllowed,
StageNotFoundException,
)
class APIGatewayResponse(BaseResponse):
def error(self, type_, message, status=400):
    """Build an API Gateway error response tuple (status, headers, JSON body)."""
    body = json.dumps({"__type": type_, "message": message})
    return status, self.response_headers, body
def _get_param(self, key):
return json.loads(self.body).get(key)
return json.loads(self.body).get(key) if self.body else None
def _get_param_with_default_value(self, key, default):
jsonbody = json.loads(self.body)
@ -63,14 +75,21 @@ class APIGatewayResponse(BaseResponse):
function_id = self.path.replace("/restapis/", "", 1).split("/")[0]
resource_id = self.path.split("/")[-1]
if self.method == "GET":
resource = self.backend.get_resource(function_id, resource_id)
elif self.method == "POST":
path_part = self._get_param("pathPart")
resource = self.backend.create_resource(function_id, resource_id, path_part)
elif self.method == "DELETE":
resource = self.backend.delete_resource(function_id, resource_id)
return 200, {}, json.dumps(resource.to_dict())
try:
if self.method == "GET":
resource = self.backend.get_resource(function_id, resource_id)
elif self.method == "POST":
path_part = self._get_param("pathPart")
resource = self.backend.create_resource(
function_id, resource_id, path_part
)
elif self.method == "DELETE":
resource = self.backend.delete_resource(function_id, resource_id)
return 200, {}, json.dumps(resource.to_dict())
except BadRequestException as e:
return self.error(
"com.amazonaws.dynamodb.v20111205#BadRequestException", e.message
)
def resource_methods(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
@ -165,6 +184,9 @@ class APIGatewayResponse(BaseResponse):
stage_response = self.backend.update_stage(
function_id, stage_name, patch_operations
)
elif self.method == "DELETE":
self.backend.delete_stage(function_id, stage_name)
return 202, {}, "{}"
return 200, {}, json.dumps(stage_response)
def integrations(self, request, full_url, headers):
@ -174,27 +196,40 @@ class APIGatewayResponse(BaseResponse):
resource_id = url_path_parts[4]
method_type = url_path_parts[6]
if self.method == "GET":
integration_response = self.backend.get_integration(
function_id, resource_id, method_type
try:
if self.method == "GET":
integration_response = self.backend.get_integration(
function_id, resource_id, method_type
)
elif self.method == "PUT":
integration_type = self._get_param("type")
uri = self._get_param("uri")
integration_http_method = self._get_param("httpMethod")
creds = self._get_param("credentials")
request_templates = self._get_param("requestTemplates")
integration_response = self.backend.create_integration(
function_id,
resource_id,
method_type,
integration_type,
uri,
credentials=creds,
integration_method=integration_http_method,
request_templates=request_templates,
)
elif self.method == "DELETE":
integration_response = self.backend.delete_integration(
function_id, resource_id, method_type
)
return 200, {}, json.dumps(integration_response)
except BadRequestException as e:
return self.error(
"com.amazonaws.dynamodb.v20111205#BadRequestException", e.message
)
elif self.method == "PUT":
integration_type = self._get_param("type")
uri = self._get_param("uri")
request_templates = self._get_param("requestTemplates")
integration_response = self.backend.create_integration(
function_id,
resource_id,
method_type,
integration_type,
uri,
request_templates=request_templates,
except CrossAccountNotAllowed as e:
return self.error(
"com.amazonaws.dynamodb.v20111205#AccessDeniedException", e.message
)
elif self.method == "DELETE":
integration_response = self.backend.delete_integration(
function_id, resource_id, method_type
)
return 200, {}, json.dumps(integration_response)
def integration_responses(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
@ -204,36 +239,52 @@ class APIGatewayResponse(BaseResponse):
method_type = url_path_parts[6]
status_code = url_path_parts[9]
if self.method == "GET":
integration_response = self.backend.get_integration_response(
function_id, resource_id, method_type, status_code
try:
if self.method == "GET":
integration_response = self.backend.get_integration_response(
function_id, resource_id, method_type, status_code
)
elif self.method == "PUT":
selection_pattern = self._get_param("selectionPattern")
response_templates = self._get_param("responseTemplates")
integration_response = self.backend.create_integration_response(
function_id,
resource_id,
method_type,
status_code,
selection_pattern,
response_templates,
)
elif self.method == "DELETE":
integration_response = self.backend.delete_integration_response(
function_id, resource_id, method_type, status_code
)
return 200, {}, json.dumps(integration_response)
except BadRequestException as e:
return self.error(
"com.amazonaws.dynamodb.v20111205#BadRequestException", e.message
)
elif self.method == "PUT":
selection_pattern = self._get_param("selectionPattern")
integration_response = self.backend.create_integration_response(
function_id, resource_id, method_type, status_code, selection_pattern
)
elif self.method == "DELETE":
integration_response = self.backend.delete_integration_response(
function_id, resource_id, method_type, status_code
)
return 200, {}, json.dumps(integration_response)
def deployments(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
function_id = self.path.replace("/restapis/", "", 1).split("/")[0]
if self.method == "GET":
deployments = self.backend.get_deployments(function_id)
return 200, {}, json.dumps({"item": deployments})
elif self.method == "POST":
name = self._get_param("stageName")
description = self._get_param_with_default_value("description", "")
stage_variables = self._get_param_with_default_value("variables", {})
deployment = self.backend.create_deployment(
function_id, name, description, stage_variables
try:
if self.method == "GET":
deployments = self.backend.get_deployments(function_id)
return 200, {}, json.dumps({"item": deployments})
elif self.method == "POST":
name = self._get_param("stageName")
description = self._get_param_with_default_value("description", "")
stage_variables = self._get_param_with_default_value("variables", {})
deployment = self.backend.create_deployment(
function_id, name, description, stage_variables
)
return 200, {}, json.dumps(deployment)
except BadRequestException as e:
return self.error(
"com.amazonaws.dynamodb.v20111205#BadRequestException", e.message
)
return 200, {}, json.dumps(deployment)
def individual_deployment(self, request, full_url, headers):
self.setup_class(request, full_url, headers)

View File

@ -304,6 +304,8 @@ class LambdaFunction(BaseModel):
self.timeout = value
elif key == "VpcConfig":
self.vpc_config = value
elif key == "Environment":
self.environment_vars = value["Variables"]
return self.get_configuration()
@ -634,7 +636,7 @@ class LambdaStorage(object):
def _get_alias(self, name, alias):
return self._functions[name]["alias"].get(alias, None)
def get_function(self, name, qualifier=None):
def get_function_by_name(self, name, qualifier=None):
if name not in self._functions:
return None
@ -657,8 +659,8 @@ class LambdaStorage(object):
def get_arn(self, arn):
return self._arns.get(arn, None)
def get_function_by_name_or_arn(self, input):
return self.get_function(input) or self.get_arn(input)
def get_function_by_name_or_arn(self, input, qualifier=None):
return self.get_function_by_name(input, qualifier) or self.get_arn(input)
def put_function(self, fn):
"""
@ -719,7 +721,7 @@ class LambdaStorage(object):
return True
else:
fn = self.get_function(name, qualifier)
fn = self.get_function_by_name(name, qualifier)
if fn:
self._functions[name]["versions"].remove(fn)
@ -822,8 +824,10 @@ class LambdaBackend(BaseBackend):
def publish_function(self, function_name):
return self._lambdas.publish_function(function_name)
def get_function(self, function_name, qualifier=None):
return self._lambdas.get_function(function_name, qualifier)
def get_function(self, function_name_or_arn, qualifier=None):
return self._lambdas.get_function_by_name_or_arn(
function_name_or_arn, qualifier
)
def list_versions_by_function(self, function_name):
return self._lambdas.list_versions_by_function(function_name)
@ -928,7 +932,7 @@ class LambdaBackend(BaseBackend):
}
]
}
func = self._lambdas.get_function(function_name, qualifier)
func = self._lambdas.get_function_by_name_or_arn(function_name, qualifier)
func.invoke(json.dumps(event), {}, {})
def send_dynamodb_items(self, function_arn, items, source):

View File

@ -286,7 +286,7 @@ class LambdaResponse(BaseResponse):
return 404, {}, "{}"
def _get_function(self, request, full_url, headers):
function_name = self.path.rsplit("/", 1)[-1]
function_name = unquote(self.path.rsplit("/", 1)[-1])
qualifier = self._get_param("Qualifier", None)
fn = self.lambda_backend.get_function(function_name, qualifier)

View File

@ -5,12 +5,15 @@ from moto.apigateway import apigateway_backends
from moto.athena import athena_backends
from moto.autoscaling import autoscaling_backends
from moto.awslambda import lambda_backends
from moto.batch import batch_backends
from moto.cloudformation import cloudformation_backends
from moto.cloudwatch import cloudwatch_backends
from moto.cognitoidentity import cognitoidentity_backends
from moto.cognitoidp import cognitoidp_backends
from moto.config import config_backends
from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.datasync import datasync_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
from moto.dynamodbstreams import dynamodbstreams_backends
@ -25,6 +28,8 @@ from moto.glacier import glacier_backends
from moto.glue import glue_backends
from moto.iam import iam_backends
from moto.instance_metadata import instance_metadata_backends
from moto.iot import iot_backends
from moto.iotdata import iotdata_backends
from moto.kinesis import kinesis_backends
from moto.kms import kms_backends
from moto.logs import logs_backends
@ -34,6 +39,7 @@ from moto.polly import polly_backends
from moto.rds2 import rds2_backends
from moto.redshift import redshift_backends
from moto.resourcegroups import resourcegroups_backends
from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
from moto.route53 import route53_backends
from moto.s3 import s3_backends
from moto.secretsmanager import secretsmanager_backends
@ -45,11 +51,6 @@ from moto.stepfunctions import stepfunction_backends
from moto.sts import sts_backends
from moto.swf import swf_backends
from moto.xray import xray_backends
from moto.iot import iot_backends
from moto.iotdata import iotdata_backends
from moto.batch import batch_backends
from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
from moto.config import config_backends
BACKENDS = {
"acm": acm_backends,
@ -63,6 +64,7 @@ BACKENDS = {
"cognito-idp": cognitoidp_backends,
"config": config_backends,
"datapipeline": datapipeline_backends,
"datasync": datasync_backends,
"dynamodb": dynamodb_backends,
"dynamodb2": dynamodb_backends2,
"dynamodbstreams": dynamodbstreams_backends,

View File

@ -0,0 +1,8 @@
from __future__ import unicode_literals

from ..core.models import base_decorator, deprecated_base_decorator
from .models import datasync_backends

# Default backend instance used when no region is specified.
datasync_backend = datasync_backends["us-east-1"]
# Decorator that mocks the DataSync service for all regions.
mock_datasync = base_decorator(datasync_backends)
# Deprecated variant of the mock decorator (legacy moto API).
mock_datasync_deprecated = deprecated_base_decorator(datasync_backends)

View File

@ -0,0 +1,15 @@
from __future__ import unicode_literals
from moto.core.exceptions import JsonRESTError
class DataSyncClientError(JsonRESTError):
    """Base class for DataSync client errors (rendered as HTTP 400)."""

    code = 400
class InvalidRequestException(DataSyncClientError):
    """Raised when a DataSync request references missing or invalid data."""

    def __init__(self, msg=None):
        self.code = 400
        error_message = msg or "The request is not valid."
        super(InvalidRequestException, self).__init__(
            "InvalidRequestException", error_message
        )

178
moto/datasync/models.py Normal file
View File

@ -0,0 +1,178 @@
import boto3
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from .exceptions import InvalidRequestException
class Location(BaseModel):
    """A DataSync location (source or destination endpoint, e.g. S3 or SMB)."""

    def __init__(
        self, location_uri, region_name=None, typ=None, metadata=None, arn_counter=0
    ):
        self.uri = location_uri
        self.region_name = region_name
        self.typ = typ
        self.metadata = metadata
        # ARNs are made unique by a zero-padded, backend-wide counter.
        suffix = str(arn_counter).zfill(17)
        self.arn = "arn:aws:datasync:{0}:111222333444:location/loc-{1}".format(
            region_name, suffix
        )
class Task(BaseModel):
    """A DataSync task linking a source location to a destination location."""

    def __init__(
        self,
        source_location_arn,
        destination_location_arn,
        name,
        region_name,
        arn_counter=0,
    ):
        self.source_location_arn = source_location_arn
        self.destination_location_arn = destination_location_arn
        self.name = name
        # For simplicity, tasks are modelled as either AVAILABLE or RUNNING.
        self.status = "AVAILABLE"
        self.current_task_execution_arn = None
        # ARNs are made unique by a zero-padded, backend-wide counter.
        suffix = str(arn_counter).zfill(17)
        self.arn = "arn:aws:datasync:{0}:111222333444:task/task-{1}".format(
            region_name, suffix
        )
class TaskExecution(BaseModel):
    """A single (simulated) run of a Task.

    For simplicity a task execution can never fail on its own. AWS
    documentation is inconsistent about the status values:
      'QUEUED'|'LAUNCHING'|'PREPARING'|'TRANSFERRING'|'VERIFYING'|'SUCCESS'|'ERROR'
    versus
      INITIALIZING | PREPARING | TRANSFERRING | VERIFYING | SUCCESS/FAILURE
    (checking with AWS Support; possibly also a COMPLETED state).
    """

    TASK_EXECUTION_INTERMEDIATE_STATES = (
        "INITIALIZING",
        # 'QUEUED', 'LAUNCHING',
        "PREPARING",
        "TRANSFERRING",
        "VERIFYING",
    )
    TASK_EXECUTION_FAILURE_STATES = ("ERROR",)
    TASK_EXECUTION_SUCCESS_STATES = ("SUCCESS",)

    def __init__(self, task_arn, arn_counter=0):
        self.task_arn = task_arn
        self.arn = "{0}/execution/exec-{1}".format(task_arn, str(arn_counter).zfill(17))
        self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[0]

    def iterate_status(self):
        """Advance the simulated execution one step toward SUCCESS.

        Terminal states (SUCCESS / ERROR) are left untouched.
        """
        current = self.status
        if current in self.TASK_EXECUTION_FAILURE_STATES:
            return
        if current in self.TASK_EXECUTION_SUCCESS_STATES:
            return
        if current in self.TASK_EXECUTION_INTERMEDIATE_STATES:
            idx = self.TASK_EXECUTION_INTERMEDIATE_STATES.index(current)
            if idx + 1 < len(self.TASK_EXECUTION_INTERMEDIATE_STATES):
                self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[idx + 1]
            else:
                # Last intermediate state rolls over to SUCCESS.
                self.status = self.TASK_EXECUTION_SUCCESS_STATES[0]
            return
        raise Exception(
            "TaskExecution.iterate_status: Unknown status={0}".format(self.status)
        )

    def cancel(self):
        """Cancel a still-running execution; terminal executions cannot be cancelled."""
        if self.status not in self.TASK_EXECUTION_INTERMEDIATE_STATES:
            raise InvalidRequestException(
                "Sync task cannot be cancelled in its current status: {0}".format(
                    self.status
                )
            )
        self.status = "ERROR"
class DataSyncBackend(BaseBackend):
    """In-memory backend holding DataSync locations, tasks and executions."""

    def __init__(self, region_name):
        self.region_name = region_name
        # Monotonic counter, incremented for every created entity so that
        # generated ARNs are unique within this backend.
        self.arn_counter = 0
        self.locations = OrderedDict()
        self.tasks = OrderedDict()
        self.task_executions = OrderedDict()

    def reset(self):
        """Reset the backend to a pristine state, keeping its region."""
        region_name = self.region_name
        self._reset_model_refs()
        self.__dict__ = {}
        self.__init__(region_name)

    def create_location(self, location_uri, typ=None, metadata=None):
        """Register a new location and return its ARN.

        AWS DataSync allows duplicate LocationUris, so no uniqueness
        check is performed here.
        """
        if not typ:
            raise Exception("Location type must be specified")
        self.arn_counter += 1
        location = Location(
            location_uri,
            region_name=self.region_name,
            arn_counter=self.arn_counter,
            metadata=metadata,
            typ=typ,
        )
        self.locations[location.arn] = location
        return location.arn

    def create_task(self, source_location_arn, destination_location_arn, name):
        """Create a task between two existing locations and return its ARN."""
        for location_arn in (source_location_arn, destination_location_arn):
            if location_arn not in self.locations:
                raise InvalidRequestException(
                    "Location {0} not found.".format(location_arn)
                )
        self.arn_counter += 1
        task = Task(
            source_location_arn,
            destination_location_arn,
            name,
            region_name=self.region_name,
            arn_counter=self.arn_counter,
        )
        self.tasks[task.arn] = task
        return task.arn

    def start_task_execution(self, task_arn):
        """Start executing an AVAILABLE task; return the new execution's ARN."""
        self.arn_counter += 1
        task = self.tasks.get(task_arn)
        if task is not None and task.status == "AVAILABLE":
            execution = TaskExecution(task_arn, arn_counter=self.arn_counter)
            self.task_executions[execution.arn] = execution
            task.current_task_execution_arn = execution.arn
            task.status = "RUNNING"
            return execution.arn
        raise InvalidRequestException("Invalid request.")

    def cancel_task_execution(self, task_execution_arn):
        """Cancel a running task execution and detach it from its task."""
        if task_execution_arn not in self.task_executions:
            raise InvalidRequestException(
                "Sync task {0} is not found.".format(task_execution_arn)
            )
        execution = self.task_executions[task_execution_arn]
        execution.cancel()
        self.tasks[execution.task_arn].current_task_execution_arn = None
# One DataSyncBackend instance per region that supports the service.
datasync_backends = {
    region: DataSyncBackend(region_name=region)
    for region in boto3.Session().get_available_regions("datasync")
}

155
moto/datasync/responses.py Normal file
View File

@ -0,0 +1,155 @@
import json
from moto.core.responses import BaseResponse
from .exceptions import InvalidRequestException
from .models import datasync_backends
class DataSyncResponse(BaseResponse):
@property
def datasync_backend(self):
    """Return the DataSyncBackend for the region this request targets."""
    return datasync_backends[self.region]
def list_locations(self):
    """Return a JSON document listing every known location's ARN and URI."""
    # Iterate values directly (the ARN key was previously fetched and
    # discarded) and build the list with a comprehension.
    locations = [
        {"LocationArn": location.arn, "LocationUri": location.uri}
        for location in self.datasync_backend.locations.values()
    ]
    return json.dumps({"Locations": locations})
def _get_location(self, location_arn, typ):
location_arn = self._get_param("LocationArn")
if location_arn not in self.datasync_backend.locations:
raise InvalidRequestException(
"Location {0} is not found.".format(location_arn)
)
location = self.datasync_backend.locations[location_arn]
if location.typ != typ:
raise InvalidRequestException(
"Invalid Location type: {0}".format(location.typ)
)
return location
def create_location_s3(self):
# s3://bucket_name/folder/
s3_bucket_arn = self._get_param("S3BucketArn")
subdirectory = self._get_param("Subdirectory")
metadata = {"S3Config": self._get_param("S3Config")}
location_uri_elts = ["s3:/", s3_bucket_arn.split(":")[-1]]
if subdirectory:
location_uri_elts.append(subdirectory)
location_uri = "/".join(location_uri_elts)
arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="S3"
)
return json.dumps({"LocationArn": arn})
def describe_location_s3(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="S3")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"S3Config": location.metadata["S3Config"],
}
)
def create_location_smb(self):
# smb://smb.share.fqdn/AWS_Test/
subdirectory = self._get_param("Subdirectory")
server_hostname = self._get_param("ServerHostname")
metadata = {
"AgentArns": self._get_param("AgentArns"),
"User": self._get_param("User"),
"Domain": self._get_param("Domain"),
"MountOptions": self._get_param("MountOptions"),
}
location_uri = "/".join(["smb:/", server_hostname, subdirectory])
arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="SMB"
)
return json.dumps({"LocationArn": arn})
def describe_location_smb(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="SMB")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"AgentArns": location.metadata["AgentArns"],
"User": location.metadata["User"],
"Domain": location.metadata["Domain"],
"MountOptions": location.metadata["MountOptions"],
}
)
def create_task(self):
destination_location_arn = self._get_param("DestinationLocationArn")
source_location_arn = self._get_param("SourceLocationArn")
name = self._get_param("Name")
arn = self.datasync_backend.create_task(
source_location_arn, destination_location_arn, name
)
return json.dumps({"TaskArn": arn})
def list_tasks(self):
tasks = list()
for arn, task in self.datasync_backend.tasks.items():
tasks.append(
{"Name": task.name, "Status": task.status, "TaskArn": task.arn}
)
return json.dumps({"Tasks": tasks})
def describe_task(self):
task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks:
task = self.datasync_backend.tasks[task_arn]
return json.dumps(
{
"TaskArn": task.arn,
"Name": task.name,
"CurrentTaskExecutionArn": task.current_task_execution_arn,
"Status": task.status,
"SourceLocationArn": task.source_location_arn,
"DestinationLocationArn": task.destination_location_arn,
}
)
raise InvalidRequestException
def start_task_execution(self):
task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks:
arn = self.datasync_backend.start_task_execution(task_arn)
if arn:
return json.dumps({"TaskExecutionArn": arn})
raise InvalidRequestException("Invalid request.")
def cancel_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
self.datasync_backend.cancel_task_execution(task_execution_arn)
return json.dumps({})
def describe_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
if task_execution_arn in self.datasync_backend.task_executions:
task_execution = self.datasync_backend.task_executions[task_execution_arn]
if task_execution:
result = json.dumps(
{
"TaskExecutionArn": task_execution.arn,
"Status": task_execution.status,
}
)
if task_execution.status == "SUCCESS":
self.datasync_backend.tasks[
task_execution.task_arn
].status = "AVAILABLE"
# Simulate task being executed
task_execution.iterate_status()
return result
raise InvalidRequestException

9
moto/datasync/urls.py Normal file
View File

@ -0,0 +1,9 @@
from __future__ import unicode_literals
from .responses import DataSyncResponse
# Hostname pattern for the DataSync endpoints moto should intercept.
url_bases = ["https?://(.*?)(datasync)(.*?).amazonaws.com"]
# All DataSync actions are POSTed to the service root.
url_paths = {
    "{0}/$": DataSyncResponse.dispatch,
}

View File

@ -107,6 +107,28 @@ class DynamoType(object):
else:
self.value.pop(key)
def filter(self, projection_expressions):
    """Prune this (map) value down to the attributes named in the given
    projection expressions, recursing into nested document paths."""
    nested_roots = [
        expr.split(".", 1)[0] for expr in projection_expressions if "." in expr
    ]
    if not self.is_map():
        return
    to_remove = []
    for attr in self.value:
        if attr in nested_roots:
            # Strip the "<attr>." prefix and filter the nested map.
            prefix = attr + "."
            sub_exprs = [
                expr[len(prefix) :]
                for expr in projection_expressions
                if expr.startswith(prefix)
            ]
            self.value[attr].filter(sub_exprs)
        elif attr not in projection_expressions:
            to_remove.append(attr)
    for attr in to_remove:
        self.value.pop(attr)
def __hash__(self):
    # Hash on (type, value) so DynamoType instances can be used in sets
    # and as dict keys, consistent with equality on the same pair.
    return hash((self.type, self.value))
@ -477,6 +499,24 @@ class Item(BaseModel):
"%s action not support for update_with_attribute_updates" % action
)
# Filter using projection_expression
# Ensure a deep copy is used to filter, otherwise actual data will be removed
def filter(self, projection_expression):
    """Keep only the attributes named in the comma-separated projection
    expression, recursing into nested document paths ("a.b.c")."""
    wanted = [part.strip() for part in projection_expression.split(",")]
    nested_roots = [part.split(".", 1)[0] for part in wanted if "." in part]
    for attr in list(self.attrs):
        if attr in nested_roots:
            prefix = attr + "."
            self.attrs[attr].filter(
                [part[len(prefix) :] for part in wanted if part.startswith(prefix)]
            )
        elif attr not in wanted:
            self.attrs.pop(attr)
class StreamRecord(BaseModel):
def __init__(self, table, stream_type, event_name, old, new, seq):
@ -774,11 +814,8 @@ class Table(BaseModel):
result = self.items[hash_key]
if projection_expression and result:
expressions = [x.strip() for x in projection_expression.split(",")]
result = copy.deepcopy(result)
for attr in list(result.attrs):
if attr not in expressions:
result.attrs.pop(attr)
result.filter(projection_expression)
if not result:
raise KeyError
@ -911,13 +948,10 @@ class Table(BaseModel):
if filter_expression is not None:
results = [item for item in results if filter_expression.expr(item)]
results = copy.deepcopy(results)
if projection_expression:
expressions = [x.strip() for x in projection_expression.split(",")]
results = copy.deepcopy(results)
for result in results:
for attr in list(result.attrs):
if attr not in expressions:
result.attrs.pop(attr)
result.filter(projection_expression)
results, last_evaluated_key = self._trim_results(
results, limit, exclusive_start_key
@ -1004,12 +1038,9 @@ class Table(BaseModel):
results.append(item)
if projection_expression:
expressions = [x.strip() for x in projection_expression.split(",")]
results = copy.deepcopy(results)
for result in results:
for attr in list(result.attrs):
if attr not in expressions:
result.attrs.pop(attr)
result.filter(projection_expression)
results, last_evaluated_key = self._trim_results(
results, limit, exclusive_start_key, index_name

View File

@ -571,25 +571,22 @@ class DynamoHandler(BaseResponse):
return dynamo_json_dump(result)
def _adjust_projection_expression(
self, projection_expression, expression_attribute_names
):
if projection_expression and expression_attribute_names:
expressions = [x.strip() for x in projection_expression.split(",")]
projection_expr = None
for expression in expressions:
if projection_expr is not None:
projection_expr = projection_expr + ", "
else:
projection_expr = ""
def _adjust_projection_expression(self, projection_expression, expr_attr_names):
def _adjust(expression):
return (
expr_attr_names[expression]
if expression in expr_attr_names
else expression
)
if expression in expression_attribute_names:
projection_expr = (
projection_expr + expression_attribute_names[expression]
)
else:
projection_expr = projection_expr + expression
return projection_expr
if projection_expression and expr_attr_names:
expressions = [x.strip() for x in projection_expression.split(",")]
return ",".join(
[
".".join([_adjust(expr) for expr in nested_expr.split(".")])
for nested_expr in expressions
]
)
return projection_expression

View File

@ -128,3 +128,10 @@ class InvalidInput(RESTError):
def __init__(self, message):
super(InvalidInput, self).__init__("InvalidInput", message)
class NoSuchEntity(RESTError):
    """IAM's 404 error for requests that reference a nonexistent resource."""

    code = 404

    def __init__(self, message):
        super(NoSuchEntity, self).__init__("NoSuchEntity", message)

View File

@ -35,6 +35,7 @@ from .exceptions import (
EntityAlreadyExists,
ValidationError,
InvalidInput,
NoSuchEntity,
)
from .utils import (
random_access_key,
@ -652,6 +653,89 @@ class User(BaseModel):
)
class AccountPasswordPolicy(BaseModel):
    """Account-wide IAM password policy.

    Validates the numeric limits at construction time and raises a
    ValidationError with an AWS-style message listing every violated
    constraint at once.
    """

    def __init__(
        self,
        allow_change_password,
        hard_expiry,
        max_password_age,
        minimum_password_length,
        password_reuse_prevention,
        require_lowercase_characters,
        require_numbers,
        require_symbols,
        require_uppercase_characters,
    ):
        self._errors = []
        self._validate(
            max_password_age, minimum_password_length, password_reuse_prevention
        )

        self.allow_users_to_change_password = allow_change_password
        self.hard_expiry = hard_expiry
        self.max_password_age = max_password_age
        self.minimum_password_length = minimum_password_length
        self.password_reuse_prevention = password_reuse_prevention
        self.require_lowercase_characters = require_lowercase_characters
        self.require_numbers = require_numbers
        self.require_symbols = require_symbols
        self.require_uppercase_characters = require_uppercase_characters

    @property
    def expire_passwords(self):
        # Passwords expire iff a positive max age is configured.
        # (Idiom fix: bool(...) instead of `True if ... else False`.)
        return bool(self.max_password_age and self.max_password_age > 0)

    def _validate(
        self, max_password_age, minimum_password_length, password_reuse_prevention
    ):
        # Collect every violation first, then raise them together so the
        # message matches AWS's multi-error validation format.
        if minimum_password_length > 128:
            self._errors.append(
                self._format_error(
                    key="minimumPasswordLength",
                    value=minimum_password_length,
                    constraint="Member must have value less than or equal to 128",
                )
            )

        if password_reuse_prevention and password_reuse_prevention > 24:
            self._errors.append(
                self._format_error(
                    key="passwordReusePrevention",
                    value=password_reuse_prevention,
                    constraint="Member must have value less than or equal to 24",
                )
            )

        if max_password_age and max_password_age > 1095:
            self._errors.append(
                self._format_error(
                    key="maxPasswordAge",
                    value=max_password_age,
                    constraint="Member must have value less than or equal to 1095",
                )
            )

        self._raise_errors()

    def _format_error(self, key, value, constraint):
        # Mirrors the AWS parameter-validation message format.
        return 'Value "{value}" at "{key}" failed to satisfy constraint: {constraint}'.format(
            constraint=constraint, key=key, value=value,
        )

    def _raise_errors(self):
        if self._errors:
            count = len(self._errors)
            plural = "s" if len(self._errors) > 1 else ""
            errors = "; ".join(self._errors)
            self._errors = []  # reset collected errors
            raise ValidationError(
                "{count} validation error{plural} detected: {errors}".format(
                    count=count, plural=plural, errors=errors,
                )
            )
class IAMBackend(BaseBackend):
def __init__(self):
self.instance_profiles = {}
@ -666,6 +750,7 @@ class IAMBackend(BaseBackend):
self.open_id_providers = {}
self.policy_arn_regex = re.compile(r"^arn:aws:iam::[0-9]*:policy/.*$")
self.virtual_mfa_devices = {}
self.account_password_policy = None
super(IAMBackend, self).__init__()
def _init_managed_policies(self):
@ -1590,5 +1675,47 @@ class IAMBackend(BaseBackend):
def list_open_id_connect_providers(self):
return list(self.open_id_providers.keys())
def update_account_password_policy(
    self,
    allow_change_password,
    hard_expiry,
    max_password_age,
    minimum_password_length,
    password_reuse_prevention,
    require_lowercase_characters,
    require_numbers,
    require_symbols,
    require_uppercase_characters,
):
    """Create or replace the account-wide password policy.

    Validation of the numeric limits happens inside AccountPasswordPolicy.
    """
    self.account_password_policy = AccountPasswordPolicy(
        allow_change_password, hard_expiry, max_password_age,
        minimum_password_length, password_reuse_prevention,
        require_lowercase_characters, require_numbers,
        require_symbols, require_uppercase_characters,
    )
def get_account_password_policy(self):
    """Return the account password policy, raising NoSuchEntity if unset."""
    policy = self.account_password_policy
    if not policy:
        raise NoSuchEntity(
            "The Password Policy with domain name {} cannot be found.".format(
                ACCOUNT_ID
            )
        )
    return policy
def delete_account_password_policy(self):
    """Remove the account password policy; NoSuchEntity if none is set."""
    if not self.account_password_policy:
        raise NoSuchEntity(
            "The account policy with name PasswordPolicy cannot be found."
        )
    self.account_password_policy = None
# Single shared in-memory backend: IAM is a global, region-less service.
iam_backend = IAMBackend()

View File

@ -838,6 +838,50 @@ class IamResponse(BaseResponse):
template = self.response_template(LIST_OPEN_ID_CONNECT_PROVIDERS_TEMPLATE)
return template.render(open_id_provider_arns=open_id_provider_arns)
def update_account_password_policy(self):
    """Parse the UpdateAccountPasswordPolicy request and apply it.

    Parameter read order and the positional order passed to the backend
    both match the backend signature.
    """
    iam_backend.update_account_password_policy(
        self._get_bool_param("AllowUsersToChangePassword", False),
        self._get_bool_param("HardExpiry"),
        self._get_int_param("MaxPasswordAge"),
        self._get_int_param("MinimumPasswordLength", 6),
        self._get_int_param("PasswordReusePrevention"),
        self._get_bool_param("RequireLowercaseCharacters", False),
        self._get_bool_param("RequireNumbers", False),
        self._get_bool_param("RequireSymbols", False),
        self._get_bool_param("RequireUppercaseCharacters", False),
    )
    return self.response_template(UPDATE_ACCOUNT_PASSWORD_POLICY_TEMPLATE).render()
def get_account_password_policy(self):
    """Render the GetAccountPasswordPolicy XML for the current policy."""
    policy = iam_backend.get_account_password_policy()
    return self.response_template(GET_ACCOUNT_PASSWORD_POLICY_TEMPLATE).render(
        password_policy=policy
    )
def delete_account_password_policy(self):
    """Delete the account password policy and render the confirmation XML."""
    iam_backend.delete_account_password_policy()
    return self.response_template(DELETE_ACCOUNT_PASSWORD_POLICY_TEMPLATE).render()
LIST_ENTITIES_FOR_POLICY_TEMPLATE = """<ListEntitiesForPolicyResponse>
<ListEntitiesForPolicyResult>
@ -2170,3 +2214,44 @@ LIST_OPEN_ID_CONNECT_PROVIDERS_TEMPLATE = """<ListOpenIDConnectProvidersResponse
<RequestId>de2c0228-4f63-11e4-aefa-bfd6aEXAMPLE</RequestId>
</ResponseMetadata>
</ListOpenIDConnectProvidersResponse>"""
# Jinja2 XML response bodies for the account password policy actions.
# Request IDs are static example values, as in moto's other IAM templates.
UPDATE_ACCOUNT_PASSWORD_POLICY_TEMPLATE = """<UpdateAccountPasswordPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</UpdateAccountPasswordPolicyResponse>"""
# Optional fields (HardExpiry, MaxPasswordAge, PasswordReusePrevention) are
# only emitted when set, matching the live GetAccountPasswordPolicy response.
GET_ACCOUNT_PASSWORD_POLICY_TEMPLATE = """<GetAccountPasswordPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetAccountPasswordPolicyResult>
<PasswordPolicy>
<AllowUsersToChangePassword>{{ password_policy.allow_users_to_change_password | lower }}</AllowUsersToChangePassword>
<ExpirePasswords>{{ password_policy.expire_passwords | lower }}</ExpirePasswords>
{% if password_policy.hard_expiry %}
<HardExpiry>{{ password_policy.hard_expiry | lower }}</HardExpiry>
{% endif %}
{% if password_policy.max_password_age %}
<MaxPasswordAge>{{ password_policy.max_password_age }}</MaxPasswordAge>
{% endif %}
<MinimumPasswordLength>{{ password_policy.minimum_password_length }}</MinimumPasswordLength>
{% if password_policy.password_reuse_prevention %}
<PasswordReusePrevention>{{ password_policy.password_reuse_prevention }}</PasswordReusePrevention>
{% endif %}
<RequireLowercaseCharacters>{{ password_policy.require_lowercase_characters | lower }}</RequireLowercaseCharacters>
<RequireNumbers>{{ password_policy.require_numbers | lower }}</RequireNumbers>
<RequireSymbols>{{ password_policy.require_symbols | lower }}</RequireSymbols>
<RequireUppercaseCharacters>{{ password_policy.require_uppercase_characters | lower }}</RequireUppercaseCharacters>
</PasswordPolicy>
</GetAccountPasswordPolicyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetAccountPasswordPolicyResponse>"""
DELETE_ACCOUNT_PASSWORD_POLICY_TEMPLATE = """<DeleteAccountPasswordPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DeleteAccountPasswordPolicyResponse>"""

View File

@ -29,7 +29,7 @@ from .exceptions import (
ResourceNotFoundError,
TagLimitExceededError,
)
from .utils import make_arn_for_topic, make_arn_for_subscription
from .utils import make_arn_for_topic, make_arn_for_subscription, is_e164
DEFAULT_ACCOUNT_ID = 123456789012
DEFAULT_PAGE_SIZE = 100
@ -413,6 +413,17 @@ class SNSBackend(BaseBackend):
setattr(topic, attribute_name, attribute_value)
def subscribe(self, topic_arn, endpoint, protocol):
if protocol == "sms":
if re.search(r"[./-]{2,}", endpoint) or re.search(
r"(^[./-]|[./-]$)", endpoint
):
raise SNSInvalidParameter("Invalid SMS endpoint: {}".format(endpoint))
reduced_endpoint = re.sub(r"[./-]", "", endpoint)
if not is_e164(reduced_endpoint):
raise SNSInvalidParameter("Invalid SMS endpoint: {}".format(endpoint))
# AWS doesn't create duplicates
old_subscription = self._find_subscription(topic_arn, endpoint, protocol)
if old_subscription:

View File

@ -211,14 +211,6 @@ class SNSResponse(BaseResponse):
protocol = self._get_param("Protocol")
attributes = self._get_attributes()
if protocol == "sms" and not is_e164(endpoint):
return (
self._error(
"InvalidParameter", "Phone number does not meet the E164 format"
),
dict(status=400),
)
subscription = self.backend.subscribe(topic_arn, endpoint, protocol)
if attributes is not None:

View File

@ -1,19 +1,20 @@
from __future__ import unicode_literals
import re
from six.moves.urllib.parse import urlparse
from moto.core.responses import BaseResponse
from moto.core.utils import amz_crc32, amzn_request_id
from .utils import parse_message_attributes
from .models import sqs_backends
from six.moves.urllib.parse import urlparse
from .exceptions import (
EmptyBatchRequest,
InvalidAttributeName,
MessageAttributesInvalid,
MessageNotInflight,
ReceiptHandleIsInvalid,
EmptyBatchRequest,
InvalidAttributeName,
)
from .models import sqs_backends
from .utils import parse_message_attributes
MAXIMUM_VISIBILTY_TIMEOUT = 43200
MAXIMUM_MESSAGE_LENGTH = 262144 # 256 KiB

View File

@ -259,7 +259,10 @@ class Command(BaseModel):
class SimpleSystemManagerBackend(BaseBackend):
def __init__(self):
self._parameters = {}
# each value is a list of all of the versions for a parameter
# to get the current value, grab the last item of the list
self._parameters = defaultdict(list)
self._resource_tags = defaultdict(lambda: defaultdict(dict))
self._commands = []
self._errors = []
@ -294,8 +297,8 @@ class SimpleSystemManagerBackend(BaseBackend):
self._validate_parameter_filters(parameter_filters, by_path=False)
result = []
for param in self._parameters:
ssm_parameter = self._parameters[param]
for param_name in self._parameters:
ssm_parameter = self.get_parameter(param_name, False)
if not self._match_filters(ssm_parameter, parameter_filters):
continue
@ -504,7 +507,7 @@ class SimpleSystemManagerBackend(BaseBackend):
result = []
for name in names:
if name in self._parameters:
result.append(self._parameters[name])
result.append(self.get_parameter(name, with_decryption))
return result
def get_parameters_by_path(self, path, with_decryption, recursive, filters=None):
@ -513,17 +516,24 @@ class SimpleSystemManagerBackend(BaseBackend):
# path could be with or without a trailing /. we handle this
# difference here.
path = path.rstrip("/") + "/"
for param in self._parameters:
if path != "/" and not param.startswith(path):
for param_name in self._parameters:
if path != "/" and not param_name.startswith(path):
continue
if "/" in param[len(path) + 1 :] and not recursive:
if "/" in param_name[len(path) + 1 :] and not recursive:
continue
if not self._match_filters(self._parameters[param], filters):
if not self._match_filters(
self.get_parameter(param_name, with_decryption), filters
):
continue
result.append(self._parameters[param])
result.append(self.get_parameter(param_name, with_decryption))
return result
def get_parameter_history(self, name, with_decryption):
    """Return the full version list for *name*, or None if unknown.

    ``with_decryption`` is accepted for interface parity; decryption is
    applied when the versions are serialised by the response layer.
    """
    if name not in self._parameters:
        return None
    return self._parameters[name]
def _match_filters(self, parameter, filters=None):
"""Return True if the given parameter matches all the filters"""
for filter_obj in filters or []:
@ -579,31 +589,35 @@ class SimpleSystemManagerBackend(BaseBackend):
def get_parameter(self, name, with_decryption):
    """Return the latest version of parameter *name*, or None if unknown.

    ``self._parameters`` maps a name to the list of all of its versions;
    the current value is the last entry.  (Diff residue left the removed
    pre-versioning return statement alongside the new one; only the
    versioned behaviour is kept.)
    """
    if name in self._parameters:
        return self._parameters[name][-1]
    return None
def put_parameter(
self, name, description, value, type, allowed_pattern, keyid, overwrite
):
previous_parameter = self._parameters.get(name)
version = 1
if previous_parameter:
previous_parameter_versions = self._parameters[name]
if len(previous_parameter_versions) == 0:
previous_parameter = None
version = 1
else:
previous_parameter = previous_parameter_versions[-1]
version = previous_parameter.version + 1
if not overwrite:
return
last_modified_date = time.time()
self._parameters[name] = Parameter(
name,
value,
type,
description,
allowed_pattern,
keyid,
last_modified_date,
version,
self._parameters[name].append(
Parameter(
name,
value,
type,
description,
allowed_pattern,
keyid,
last_modified_date,
version,
)
)
return version

View File

@ -139,6 +139,28 @@ class SimpleSystemManagerResponse(BaseResponse):
response = {"Version": result}
return json.dumps(response)
def get_parameter_history(self):
    """Handle GetParameterHistory: return every version of a parameter,
    or a 400 ParameterNotFound error when the name is unknown."""
    name = self._get_param("Name")
    with_decryption = self._get_param("WithDecryption")

    history = self.ssm_backend.get_parameter_history(name, with_decryption)
    if history is None:
        return (
            json.dumps(
                {
                    "__type": "ParameterNotFound",
                    "message": "Parameter {0} not found.".format(name),
                }
            ),
            dict(status=400),
        )

    versions = [
        version.describe_response_object(decrypt=with_decryption)
        for version in history
    ]
    return json.dumps({"Parameters": versions})
def add_tags_to_resource(self):
resource_id = self._get_param("ResourceId")
resource_type = self._get_param("ResourceType")

View File

@ -9,6 +9,7 @@ from botocore.exceptions import ClientError
import responses
from moto import mock_apigateway, settings
from nose.tools import assert_raises
@freeze_time("2015-01-01")
@ -45,6 +46,32 @@ def test_list_and_delete_apis():
len(response["items"]).should.equal(1)
@mock_apigateway
def test_create_resource__validate_name():
    # pathPart validation: only a-zA-Z0-9._- is allowed, plus curly braces
    # when they wrap the whole segment (path parameters like {user_id}).
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    resources = client.get_resources(restApiId=api_id)
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]

    invalid_names = ["/users", "users/", "users/{user_id}", "us{er"]
    valid_names = ["users", "{user_id}", "user_09", "good-dog"]
    # All invalid names should throw an exception
    for name in invalid_names:
        with assert_raises(ClientError) as ex:
            client.create_resource(restApiId=api_id, parentId=root_id, pathPart=name)
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Resource's path part only allow a-zA-Z0-9._- and curly braces at the beginning and the end."
        )
    # All valid names should go through
    for name in valid_names:
        client.create_resource(restApiId=api_id, parentId=root_id, pathPart=name)
@mock_apigateway
def test_create_resource():
client = boto3.client("apigateway", region_name="us-west-2")
@ -69,9 +96,7 @@ def test_create_resource():
}
)
response = client.create_resource(
restApiId=api_id, parentId=root_id, pathPart="/users"
)
client.create_resource(restApiId=api_id, parentId=root_id, pathPart="users")
resources = client.get_resources(restApiId=api_id)["items"]
len(resources).should.equal(2)
@ -79,9 +104,7 @@ def test_create_resource():
0
]
response = client.delete_resource(
restApiId=api_id, resourceId=non_root_resource["id"]
)
client.delete_resource(restApiId=api_id, resourceId=non_root_resource["id"])
len(client.get_resources(restApiId=api_id)["items"]).should.equal(1)
@ -223,6 +246,7 @@ def test_integrations():
httpMethod="GET",
type="HTTP",
uri="http://httpbin.org/robots.txt",
integrationHttpMethod="POST",
)
# this is hard to match against, so remove it
response["ResponseMetadata"].pop("HTTPHeaders", None)
@ -308,6 +332,7 @@ def test_integrations():
type="HTTP",
uri=test_uri,
requestTemplates=templates,
integrationHttpMethod="POST",
)
# this is hard to match against, so remove it
response["ResponseMetadata"].pop("HTTPHeaders", None)
@ -340,12 +365,13 @@ def test_integration_response():
restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
)
response = client.put_integration(
client.put_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod="GET",
type="HTTP",
uri="http://httpbin.org/robots.txt",
integrationHttpMethod="POST",
)
response = client.put_integration_response(
@ -354,6 +380,7 @@ def test_integration_response():
httpMethod="GET",
statusCode="200",
selectionPattern="foobar",
responseTemplates={},
)
# this is hard to match against, so remove it
response["ResponseMetadata"].pop("HTTPHeaders", None)
@ -410,6 +437,7 @@ def test_update_stage_configuration():
stage_name = "staging"
response = client.create_rest_api(name="my_api", description="this is my api")
api_id = response["id"]
create_method_integration(client, api_id)
response = client.create_deployment(
restApiId=api_id, stageName=stage_name, description="1.0.1"
@ -534,7 +562,8 @@ def test_create_stage():
response = client.create_rest_api(name="my_api", description="this is my api")
api_id = response["id"]
response = client.create_deployment(restApiId=api_id, stageName=stage_name)
create_method_integration(client, api_id)
response = client.create_deployment(restApiId=api_id, stageName=stage_name,)
deployment_id = response["id"]
response = client.get_deployment(restApiId=api_id, deploymentId=deployment_id)
@ -690,12 +719,325 @@ def test_create_stage():
stage["cacheClusterSize"].should.equal("1.6")
@mock_apigateway
def test_create_deployment_requires_REST_methods():
    # Deploying an API with no methods must fail with BadRequestException.
    client = boto3.client("apigateway", region_name="us-west-2")
    stage_name = "staging"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]

    with assert_raises(ClientError) as ex:
        client.create_deployment(restApiId=api_id, stageName=stage_name)["id"]
    ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
    ex.exception.response["Error"]["Message"].should.equal(
        "The REST API doesn't contain any methods"
    )
@mock_apigateway
def test_create_deployment_requires_REST_method_integrations():
    # A method without an integration also blocks deployment.
    client = boto3.client("apigateway", region_name="us-west-2")
    stage_name = "staging"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    resources = client.get_resources(restApiId=api_id)
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]

    client.put_method(
        restApiId=api_id, resourceId=root_id, httpMethod="GET", authorizationType="NONE"
    )

    with assert_raises(ClientError) as ex:
        client.create_deployment(restApiId=api_id, stageName=stage_name)["id"]
    ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
    ex.exception.response["Error"]["Message"].should.equal(
        "No integration defined for method"
    )
@mock_apigateway
def test_create_simple_deployment_with_get_method():
    # Happy path: a GET method with an integration can be deployed.
    client = boto3.client("apigateway", region_name="us-west-2")
    stage_name = "staging"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    create_method_integration(client, api_id)
    deployment = client.create_deployment(restApiId=api_id, stageName=stage_name)
    assert "id" in deployment
@mock_apigateway
def test_create_simple_deployment_with_post_method():
    # Happy path: a POST method with an integration can be deployed.
    client = boto3.client("apigateway", region_name="us-west-2")
    stage_name = "staging"
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    create_method_integration(client, api_id, httpMethod="POST")
    deployment = client.create_deployment(restApiId=api_id, stageName=stage_name)
    assert "id" in deployment
@mock_apigateway
# https://github.com/aws/aws-sdk-js/issues/2588
def test_put_integration_response_requires_responseTemplate():
    # put_integration_response without responseTemplates must raise, and
    # succeed once an (even empty) responseTemplates dict is provided.
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    resources = client.get_resources(restApiId=api_id)
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]

    client.put_method(
        restApiId=api_id, resourceId=root_id, httpMethod="GET", authorizationType="NONE"
    )
    client.put_method_response(
        restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
    )
    client.put_integration(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        type="HTTP",
        uri="http://httpbin.org/robots.txt",
        integrationHttpMethod="POST",
    )

    with assert_raises(ClientError) as ex:
        client.put_integration_response(
            restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
        )
    ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
    ex.exception.response["Error"]["Message"].should.equal("Invalid request input")
    # Works fine if responseTemplate is defined
    client.put_integration_response(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod="GET",
        statusCode="200",
        responseTemplates={},
    )
@mock_apigateway
def test_put_integration_validation():
    """Exercise the validation rules of put_integration:

    * HTTP/AWS integration types require integrationHttpMethod; MOCK does not
    * AWS_PROXY only supports Lambda function / Firehose stream URIs
    * passed role credentials must belong to the caller's account, and are
      mandatory for plain AWS integrations (except Lambda invocations)
    * the URI must be a valid HTTP endpoint (HTTP types) or a valid ARN
      containing a path or action (AWS types)

    Fix: the loop variable was renamed from ``type`` to ``integration_type``
    so it no longer shadows the ``type`` builtin.
    """
    client = boto3.client("apigateway", region_name="us-west-2")
    response = client.create_rest_api(name="my_api", description="this is my api")
    api_id = response["id"]
    resources = client.get_resources(restApiId=api_id)
    root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
        0
    ]["id"]
    client.put_method(
        restApiId=api_id, resourceId=root_id, httpMethod="GET", authorizationType="NONE"
    )
    client.put_method_response(
        restApiId=api_id, resourceId=root_id, httpMethod="GET", statusCode="200"
    )
    http_types = ["HTTP", "HTTP_PROXY"]
    aws_types = ["AWS", "AWS_PROXY"]
    types_requiring_integration_method = http_types + aws_types
    types_not_requiring_integration_method = ["MOCK"]
    for integration_type in types_requiring_integration_method:
        # Ensure that integrations of these types fail if no integrationHttpMethod is provided
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                httpMethod="GET",
                type=integration_type,
                uri="http://httpbin.org/robots.txt",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Enumeration value for HttpMethod must be non-empty"
        )
    for integration_type in types_not_requiring_integration_method:
        # Ensure that integrations of these types do not need the integrationHttpMethod
        client.put_integration(
            restApiId=api_id,
            resourceId=root_id,
            httpMethod="GET",
            type=integration_type,
            uri="http://httpbin.org/robots.txt",
        )
    for integration_type in http_types:
        # Ensure that it works fine when providing the integrationHttpMethod-argument
        client.put_integration(
            restApiId=api_id,
            resourceId=root_id,
            httpMethod="GET",
            type=integration_type,
            uri="http://httpbin.org/robots.txt",
            integrationHttpMethod="POST",
        )
    for integration_type in ["AWS"]:
        # Ensure that it works fine when providing the integrationHttpMethod + credentials
        client.put_integration(
            restApiId=api_id,
            resourceId=root_id,
            credentials="arn:aws:iam::123456789012:role/service-role/testfunction-role-oe783psq",
            httpMethod="GET",
            type=integration_type,
            uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
            integrationHttpMethod="POST",
        )
    for integration_type in aws_types:
        # Ensure that credentials are not required when URI points to a Lambda stream
        client.put_integration(
            restApiId=api_id,
            resourceId=root_id,
            httpMethod="GET",
            type=integration_type,
            uri="arn:aws:apigateway:eu-west-1:lambda:path/2015-03-31/functions/arn:aws:lambda:eu-west-1:012345678901:function:MyLambda/invocations",
            integrationHttpMethod="POST",
        )
    for integration_type in ["AWS_PROXY"]:
        # Ensure that aws_proxy does not support S3
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                credentials="arn:aws:iam::123456789012:role/service-role/testfunction-role-oe783psq",
                httpMethod="GET",
                type=integration_type,
                uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Integrations of type 'AWS_PROXY' currently only supports Lambda function and Firehose stream invocations."
        )
    for integration_type in aws_types:
        # Ensure that the Role ARN is for the current account
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                credentials="arn:aws:iam::000000000000:role/service-role/testrole",
                httpMethod="GET",
                type=integration_type,
                uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("AccessDeniedException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Cross-account pass role is not allowed."
        )
    for integration_type in ["AWS"]:
        # Ensure that the Role ARN is specified for aws integrations
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                httpMethod="GET",
                type=integration_type,
                uri="arn:aws:apigateway:us-west-2:s3:path/b/k",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Role ARN must be specified for AWS integrations"
        )
    for integration_type in http_types:
        # Ensure that the URI is valid HTTP
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                httpMethod="GET",
                type=integration_type,
                uri="non-valid-http",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Invalid HTTP endpoint specified for URI"
        )
    for integration_type in aws_types:
        # Ensure that the URI is an ARN
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                httpMethod="GET",
                type=integration_type,
                uri="non-valid-arn",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "Invalid ARN specified in the request"
        )
    for integration_type in aws_types:
        # Ensure that the URI is a valid ARN
        with assert_raises(ClientError) as ex:
            client.put_integration(
                restApiId=api_id,
                resourceId=root_id,
                httpMethod="GET",
                type=integration_type,
                uri="arn:aws:iam::0000000000:role/service-role/asdf",
                integrationHttpMethod="POST",
            )
        ex.exception.response["Error"]["Code"].should.equal("BadRequestException")
        ex.exception.response["Error"]["Message"].should.equal(
            "AWS ARN for integration must contain path or action"
        )
@mock_apigateway
def test_delete_stage():
    """delete_stage removes only the targeted stage and leaves the others intact."""
    client = boto3.client("apigateway", region_name="us-west-2")
    api_id = client.create_rest_api(name="my_api", description="this is my api")["id"]
    create_method_integration(client, api_id)
    depl_1 = client.create_deployment(restApiId=api_id, stageName="staging")["id"]
    depl_2 = client.create_deployment(restApiId=api_id, stageName="staging")["id"]
    client.create_stage(restApiId=api_id, stageName="current", deploymentId=depl_1)
    client.create_stage(
        restApiId=api_id,
        stageName="stage_with_vars",
        deploymentId=depl_2,
        variables={"env": "dev"},
    )

    def stage_names():
        # Current set of stage names, in a deterministic order.
        return sorted(
            stage["stageName"] for stage in client.get_stages(restApiId=api_id)["item"]
        )

    stage_names().should.equal(sorted(["current", "stage_with_vars", "staging"]))
    # delete stage
    response = client.delete_stage(restApiId=api_id, stageName="stage_with_vars")
    response["ResponseMetadata"]["HTTPStatusCode"].should.equal(202)
    # verify other stages still exist
    stage_names().should.equal(sorted(["current", "staging"]))
@mock_apigateway
def test_deployment():
client = boto3.client("apigateway", region_name="us-west-2")
stage_name = "staging"
response = client.create_rest_api(name="my_api", description="this is my api")
api_id = response["id"]
create_method_integration(client, api_id)
response = client.create_deployment(restApiId=api_id, stageName=stage_name)
deployment_id = response["id"]
@ -719,7 +1061,7 @@ def test_deployment():
response["items"][0].pop("createdDate")
response["items"].should.equal([{"id": deployment_id, "description": ""}])
response = client.delete_deployment(restApiId=api_id, deploymentId=deployment_id)
client.delete_deployment(restApiId=api_id, deploymentId=deployment_id)
response = client.get_deployments(restApiId=api_id)
len(response["items"]).should.equal(0)
@ -730,7 +1072,7 @@ def test_deployment():
stage["stageName"].should.equal(stage_name)
stage["deploymentId"].should.equal(deployment_id)
stage = client.update_stage(
client.update_stage(
restApiId=api_id,
stageName=stage_name,
patchOperations=[
@ -774,6 +1116,7 @@ def test_http_proxying_integration():
httpMethod="GET",
type="HTTP",
uri="http://httpbin.org/robots.txt",
integrationHttpMethod="POST",
)
stage_name = "staging"
@ -888,7 +1231,6 @@ def test_usage_plans():
@mock_apigateway
def test_usage_plan_keys():
region_name = "us-west-2"
usage_plan_id = "test_usage_plan_id"
client = boto3.client("apigateway", region_name=region_name)
usage_plan_id = "test"
@ -932,7 +1274,6 @@ def test_usage_plan_keys():
@mock_apigateway
def test_create_usage_plan_key_non_existent_api_key():
region_name = "us-west-2"
usage_plan_id = "test_usage_plan_id"
client = boto3.client("apigateway", region_name=region_name)
usage_plan_id = "test"
@ -976,3 +1317,34 @@ def test_get_usage_plans_using_key_id():
len(only_plans_with_key["items"]).should.equal(1)
only_plans_with_key["items"][0]["name"].should.equal(attached_plan["name"])
only_plans_with_key["items"][0]["id"].should.equal(attached_plan["id"])
def create_method_integration(client, api_id, httpMethod="GET"):
    """Wire a minimal method + HTTP integration (and their 200 responses)
    onto the root resource of the given API.

    Keeps the signature used by the deployment tests above.
    """
    root_id = next(
        res["id"]
        for res in client.get_resources(restApiId=api_id)["items"]
        if res["path"] == "/"
    )
    client.put_method(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod=httpMethod,
        authorizationType="NONE",
    )
    client.put_method_response(
        restApiId=api_id, resourceId=root_id, httpMethod=httpMethod, statusCode="200"
    )
    client.put_integration(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod=httpMethod,
        type="HTTP",
        uri="http://httpbin.org/robots.txt",
        integrationHttpMethod="POST",
    )
    client.put_integration_response(
        restApiId=api_id,
        resourceId=root_id,
        httpMethod=httpMethod,
        statusCode="200",
        responseTemplates={},
    )

View File

@ -388,6 +388,7 @@ def test_get_function():
Timeout=3,
MemorySize=128,
Publish=True,
Environment={"Variables": {"test_variable": "test_value"}},
)
result = conn.get_function(FunctionName="testFunction")
@ -416,6 +417,11 @@ def test_get_function():
result["Configuration"]["Timeout"].should.equal(3)
result["Configuration"]["Version"].should.equal("$LATEST")
result["Configuration"].should.contain("VpcConfig")
result["Configuration"].should.contain("Environment")
result["Configuration"]["Environment"].should.contain("Variables")
result["Configuration"]["Environment"]["Variables"].should.equal(
{"test_variable": "test_value"}
)
# Test get function with
result = conn.get_function(FunctionName="testFunction", Qualifier="$LATEST")
@ -429,6 +435,33 @@ def test_get_function():
conn.get_function(FunctionName="junk", Qualifier="$LATEST")
@mock_lambda
@mock_s3
def test_get_function_by_arn():
    """get_function accepts the function's full ARN as FunctionName."""
    bucket = "test-bucket"
    s3 = boto3.client("s3", "us-east-1")
    s3.create_bucket(Bucket=bucket)
    s3.put_object(Bucket=bucket, Key="test.zip", Body=get_test_zip_file2())
    awslambda = boto3.client("lambda", "us-east-1")
    created = awslambda.create_function(
        FunctionName="testFunction",
        Runtime="python2.7",
        Role="test-iam-role",
        Handler="lambda_function.lambda_handler",
        Code={"S3Bucket": bucket, "S3Key": "test.zip"},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    # Look the function up by ARN rather than by name.
    looked_up = awslambda.get_function(FunctionName=created["FunctionArn"])
    looked_up["Configuration"]["FunctionName"].should.equal("testFunction")
@mock_lambda
@mock_s3
def test_delete_function():
@ -1322,6 +1355,7 @@ def test_update_configuration():
Timeout=3,
MemorySize=128,
Publish=True,
Environment={"Variables": {"test_old_environment": "test_old_value"}},
)
assert fxn["Description"] == "test lambda function"
@ -1336,6 +1370,7 @@ def test_update_configuration():
Handler="lambda_function.new_lambda_handler",
Runtime="python3.6",
Timeout=7,
Environment={"Variables": {"test_environment": "test_value"}},
)
assert updated_config["ResponseMetadata"]["HTTPStatusCode"] == 200
@ -1344,6 +1379,9 @@ def test_update_configuration():
assert updated_config["MemorySize"] == 128
assert updated_config["Runtime"] == "python3.6"
assert updated_config["Timeout"] == 7
assert updated_config["Environment"]["Variables"] == {
"test_environment": "test_value"
}
@mock_lambda

View File

View File

@ -0,0 +1,327 @@
import logging
import boto
import boto3
from botocore.exceptions import ClientError
from moto import mock_datasync
from nose.tools import assert_raises
def create_locations(client, create_smb=False, create_s3=False):
    """
    Convenience function for creating locations.
    Locations must exist before tasks can be created.

    Returns a dict with "smb_arn" / "s3_arn" keys (None if not created).
    """
    arns = {"smb_arn": None, "s3_arn": None}
    if create_smb:
        arns["smb_arn"] = client.create_location_smb(
            ServerHostname="host",
            Subdirectory="somewhere",
            User="",
            Password="",
            AgentArns=["stuff"],
        )["LocationArn"]
    if create_s3:
        arns["s3_arn"] = client.create_location_s3(
            S3BucketArn="arn:aws:s3:::my_bucket",
            Subdirectory="dir",
            S3Config={"BucketAccessRoleArn": "role"},
        )["LocationArn"]
    return arns
@mock_datasync
def test_create_location_smb():
    """Creating an SMB location returns a LocationArn."""
    client = boto3.client("datasync", region_name="us-east-1")
    created = client.create_location_smb(
        AgentArns=["stuff"],
        ServerHostname="host",
        Subdirectory="somewhere",
        User="",
        Password="",
    )
    assert "LocationArn" in created
@mock_datasync
def test_describe_location_smb():
    """describe_location_smb echoes back the User and AgentArns it was created with."""
    client = boto3.client("datasync", region_name="us-east-1")
    created = client.create_location_smb(
        ServerHostname="host",
        Subdirectory="somewhere",
        User="user",
        Password="",
        AgentArns=["stuff"],
    )
    described = client.describe_location_smb(LocationArn=created["LocationArn"])
    assert "LocationArn" in described
    assert "LocationUri" in described
    assert described["User"] == "user"
    assert described["AgentArns"] == ["stuff"]
@mock_datasync
def test_create_location_s3():
    """Creating an S3 location returns a LocationArn."""
    client = boto3.client("datasync", region_name="us-east-1")
    created = client.create_location_s3(
        S3Config={"BucketAccessRoleArn": "role"},
        S3BucketArn="arn:aws:s3:::my_bucket",
        Subdirectory="dir",
    )
    assert "LocationArn" in created
@mock_datasync
def test_describe_location_s3():
    """describe_location_s3 echoes back the S3Config it was created with."""
    client = boto3.client("datasync", region_name="us-east-1")
    created = client.create_location_s3(
        S3BucketArn="arn:aws:s3:::my_bucket",
        Subdirectory="dir",
        S3Config={"BucketAccessRoleArn": "role"},
    )
    described = client.describe_location_s3(LocationArn=created["LocationArn"])
    assert "LocationArn" in described
    assert "LocationUri" in described
    assert described["S3Config"] == {"BucketAccessRoleArn": "role"}
@mock_datasync
def test_describe_location_wrong():
    """Describing an SMB location through describe_location_s3 must fail.

    Cleanup: dropped the unused ``as e`` binding and the dead reassignment of
    ``response`` inside the ``with`` block (the value was never inspected).
    """
    client = boto3.client("datasync", region_name="us-east-1")
    smb_location = client.create_location_smb(
        ServerHostname="host",
        Subdirectory="somewhere",
        User="user",
        Password="",
        AgentArns=["stuff"],
    )
    # Wrong describe call for this location type: expect a client error.
    with assert_raises(ClientError):
        client.describe_location_s3(LocationArn=smb_location["LocationArn"])
@mock_datasync
def test_list_locations():
    """list_locations grows (in creation order) as SMB/S3 locations are added."""
    client = boto3.client("datasync", region_name="us-east-1")
    assert len(client.list_locations()["Locations"]) == 0

    create_locations(client, create_smb=True)
    locations = client.list_locations()["Locations"]
    assert len(locations) == 1
    assert locations[0]["LocationUri"] == "smb://host/somewhere"

    create_locations(client, create_s3=True)
    locations = client.list_locations()["Locations"]
    assert len(locations) == 2
    assert locations[1]["LocationUri"] == "s3://my_bucket/dir"

    create_locations(client, create_s3=True)
    locations = client.list_locations()["Locations"]
    assert len(locations) == 3
    assert locations[2]["LocationUri"] == "s3://my_bucket/dir"
@mock_datasync
def test_create_task():
    """A task between two existing locations can be created and returns a TaskArn."""
    client = boto3.client("datasync", region_name="us-east-1")
    arns = create_locations(client, create_smb=True, create_s3=True)
    created = client.create_task(
        SourceLocationArn=arns["smb_arn"], DestinationLocationArn=arns["s3_arn"]
    )
    assert "TaskArn" in created
@mock_datasync
def test_create_task_fail():
    """ Test that Locations must exist before a Task can be created """
    # Cleanup: dropped the unused ``as e`` bindings and dead ``response = ``
    # assignments — neither was ever inspected.
    client = boto3.client("datasync", region_name="us-east-1")
    locations = create_locations(client, create_smb=True, create_s3=True)
    # Unknown source location ARN.
    with assert_raises(ClientError):
        client.create_task(
            SourceLocationArn="1", DestinationLocationArn=locations["s3_arn"]
        )
    # Unknown destination location ARN.
    with assert_raises(ClientError):
        client.create_task(
            SourceLocationArn=locations["smb_arn"], DestinationLocationArn="2"
        )
@mock_datasync
def test_list_tasks():
    """list_tasks returns tasks in creation order; Name is optional."""
    client = boto3.client("datasync", region_name="us-east-1")
    arns = create_locations(client, create_s3=True, create_smb=True)
    client.create_task(
        SourceLocationArn=arns["smb_arn"], DestinationLocationArn=arns["s3_arn"]
    )
    client.create_task(
        SourceLocationArn=arns["s3_arn"],
        DestinationLocationArn=arns["smb_arn"],
        Name="task_name",
    )
    tasks = client.list_tasks()["Tasks"]
    assert len(tasks) == 2
    unnamed, named = tasks
    assert unnamed["Status"] == "AVAILABLE"
    assert "Name" not in unnamed
    assert named["Status"] == "AVAILABLE"
    assert named["Name"] == "task_name"
@mock_datasync
def test_describe_task():
    """describe_task reports the task ARN, status and both location ARNs."""
    client = boto3.client("datasync", region_name="us-east-1")
    arns = create_locations(client, create_s3=True, create_smb=True)
    task_arn = client.create_task(
        SourceLocationArn=arns["smb_arn"],
        DestinationLocationArn=arns["s3_arn"],
        Name="task_name",
    )["TaskArn"]
    description = client.describe_task(TaskArn=task_arn)
    for key in ("TaskArn", "Status", "SourceLocationArn", "DestinationLocationArn"):
        assert key in description
@mock_datasync
def test_describe_task_not_exist():
    """describe_task on an unknown TaskArn raises a ClientError.

    Cleanup: dropped the unused ``as e`` binding.
    """
    client = boto3.client("datasync", region_name="us-east-1")
    with assert_raises(ClientError):
        client.describe_task(TaskArn="abc")
@mock_datasync
def test_start_task_execution():
    """Starting a task attaches a CurrentTaskExecutionArn to it."""
    client = boto3.client("datasync", region_name="us-east-1")
    arns = create_locations(client, create_s3=True, create_smb=True)
    task_arn = client.create_task(
        SourceLocationArn=arns["smb_arn"],
        DestinationLocationArn=arns["s3_arn"],
        Name="task_name",
    )["TaskArn"]
    # No execution yet.
    assert "CurrentTaskExecutionArn" not in client.describe_task(TaskArn=task_arn)
    started = client.start_task_execution(TaskArn=task_arn)
    assert "TaskExecutionArn" in started
    # The task now points at its running execution.
    described = client.describe_task(TaskArn=task_arn)
    assert described["CurrentTaskExecutionArn"] == started["TaskExecutionArn"]
@mock_datasync
def test_start_task_execution_twice():
    """A task with a running execution cannot be started again.

    Cleanup: dropped the unused ``task_execution_arn`` local and the unused
    ``as e`` / dead ``response = `` bindings inside the ``with`` block.
    """
    client = boto3.client("datasync", region_name="us-east-1")
    locations = create_locations(client, create_s3=True, create_smb=True)
    task_arn = client.create_task(
        SourceLocationArn=locations["smb_arn"],
        DestinationLocationArn=locations["s3_arn"],
        Name="task_name",
    )["TaskArn"]
    response = client.start_task_execution(TaskArn=task_arn)
    assert "TaskExecutionArn" in response
    # A second start while the first execution is still current must fail.
    with assert_raises(ClientError):
        client.start_task_execution(TaskArn=task_arn)
@mock_datasync
def test_describe_task_execution():
    """The mocked execution advances one status per describe call, ending in
    a sticky SUCCESS state."""
    client = boto3.client("datasync", region_name="us-east-1")
    locations = create_locations(client, create_s3=True, create_smb=True)
    task_arn = client.create_task(
        SourceLocationArn=locations["smb_arn"],
        DestinationLocationArn=locations["s3_arn"],
        Name="task_name",
    )["TaskArn"]
    task_execution_arn = client.start_task_execution(TaskArn=task_arn)[
        "TaskExecutionArn"
    ]
    # Each time task_execution is described the Status will increment
    # This is a simple way to simulate a task being executed
    for expected_status in [
        "INITIALIZING",
        "PREPARING",
        "TRANSFERRING",
        "VERIFYING",
        "SUCCESS",
        "SUCCESS",  # terminal state does not advance further
    ]:
        response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
        assert response["TaskExecutionArn"] == task_execution_arn
        assert response["Status"] == expected_status
@mock_datasync
def test_describe_task_execution_not_exist():
    """describe_task_execution on an unknown ARN raises a ClientError.

    Cleanup: dropped the unused ``as e`` binding.
    """
    client = boto3.client("datasync", region_name="us-east-1")
    with assert_raises(ClientError):
        client.describe_task_execution(TaskExecutionArn="abc")
@mock_datasync
def test_cancel_task_execution():
    """Cancelling an execution detaches it from its task and marks it ERROR."""
    client = boto3.client("datasync", region_name="us-east-1")
    arns = create_locations(client, create_s3=True, create_smb=True)
    task_arn = client.create_task(
        SourceLocationArn=arns["smb_arn"],
        DestinationLocationArn=arns["s3_arn"],
        Name="task_name",
    )["TaskArn"]
    execution_arn = client.start_task_execution(TaskArn=task_arn)["TaskExecutionArn"]
    described = client.describe_task(TaskArn=task_arn)
    assert described["CurrentTaskExecutionArn"] == execution_arn

    client.cancel_task_execution(TaskExecutionArn=execution_arn)
    # The task no longer has a current execution ...
    assert "CurrentTaskExecutionArn" not in client.describe_task(TaskArn=task_arn)
    # ... and the execution itself reports ERROR.
    execution = client.describe_task_execution(TaskExecutionArn=execution_arn)
    assert execution["Status"] == "ERROR"

View File

@ -559,6 +559,308 @@ def test_basic_projection_expressions_using_scan():
assert "forum_name" in results["Items"][1]
@mock_dynamodb2
def test_nested_projection_expression_using_get_item():
    """Nested-path projections on get_item return only the requested
    sub-attributes and do not mutate the stored item."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Single hash-key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    full_item = {
        "forum_name": "key1",
        "nested": {
            "level1": {"id": "id1", "att": "irrelevant"},
            "level2": {"id": "id2", "include": "all"},
            "level3": {"id": "irrelevant"},
        },
        "foo": "bar",
    }
    users.put_item(Item=full_item)
    users.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Only the projected nested paths come back.
    projected = users.get_item(
        Key={"forum_name": "key1"},
        ProjectionExpression="nested.level1.id, nested.level2",
    )["Item"]
    projected.should.equal(
        {"nested": {"level1": {"id": "id1"}, "level2": {"id": "id2", "include": "all"}}}
    )
    # Assert actual data has not been deleted
    users.get_item(Key={"forum_name": "key1"})["Item"].should.equal(full_item)
@mock_dynamodb2
def test_basic_projection_expressions_using_query():
    """Query with a ProjectionExpression returns only the named top-level
    attributes; the items kept in storage retain all attributes."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Hash + range key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[
            {"AttributeName": "forum_name", "KeyType": "HASH"},
            {"AttributeName": "subject", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "forum_name", "AttributeType": "S"},
            {"AttributeName": "subject", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    users.put_item(
        Item={"forum_name": "the-key", "subject": "123", "body": "some test message"}
    )
    users.put_item(
        Item={
            "forum_name": "not-the-key",
            "subject": "123",
            "body": "some other test message",
        }
    )
    # Project two attributes: the hash key is excluded from the result.
    first = users.query(
        KeyConditionExpression=Key("forum_name").eq("the-key"),
        ProjectionExpression="body, subject",
    )["Items"][0]
    assert "body" in first
    assert first["body"] == "some test message"
    assert "subject" in first
    assert "forum_name" not in first

    users.put_item(
        Item={
            "forum_name": "the-key",
            "subject": "1234",
            "body": "yet another test message",
        }
    )
    projected = users.query(
        KeyConditionExpression=Key("forum_name").eq("the-key"),
        ProjectionExpression="body",
    )["Items"]
    for item in projected:
        assert "body" in item
        assert "subject" not in item
    assert projected[0]["body"] == "some test message"
    assert projected[1]["body"] == "yet another test message"

    # The projection expression should not remove data from storage
    stored = users.query(KeyConditionExpression=Key("forum_name").eq("the-key"))[
        "Items"
    ]
    assert "subject" in stored[0]
    assert "body" in stored[1]
    assert "forum_name" in stored[1]
@mock_dynamodb2
def test_nested_projection_expression_using_query():
    """Nested-path projections on query return only the requested
    sub-attributes and do not mutate the stored item."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Single hash-key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    full_item = {
        "forum_name": "key1",
        "nested": {
            "level1": {"id": "id1", "att": "irrelevant"},
            "level2": {"id": "id2", "include": "all"},
            "level3": {"id": "irrelevant"},
        },
        "foo": "bar",
    }
    users.put_item(Item=full_item)
    users.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Only the projected nested paths come back; "foo" is dropped.
    projected = users.query(
        KeyConditionExpression=Key("forum_name").eq("key1"),
        ProjectionExpression="nested.level1.id, nested.level2",
    )["Items"][0]
    assert "nested" in projected
    projected["nested"].should.equal(
        {"level1": {"id": "id1"}, "level2": {"id": "id2", "include": "all"}}
    )
    assert "foo" not in projected
    # Assert actual data has not been deleted
    users.query(KeyConditionExpression=Key("forum_name").eq("key1"))["Items"][
        0
    ].should.equal(full_item)
@mock_dynamodb2
def test_basic_projection_expressions_using_scan():
    """Scan with a ProjectionExpression returns only the named attributes;
    the items kept in storage retain all attributes."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Hash + range key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[
            {"AttributeName": "forum_name", "KeyType": "HASH"},
            {"AttributeName": "subject", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "forum_name", "AttributeType": "S"},
            {"AttributeName": "subject", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    users.put_item(
        Item={"forum_name": "the-key", "subject": "123", "body": "some test message"}
    )
    users.put_item(
        Item={
            "forum_name": "not-the-key",
            "subject": "123",
            "body": "some other test message",
        }
    )
    # Filtered scan projecting two attributes.
    projected = users.scan(
        FilterExpression=Key("forum_name").eq("the-key"),
        ProjectionExpression="body, subject",
    )["Items"]
    projected.should.equal([{"body": "some test message", "subject": "123"}])

    users.put_item(
        Item={
            "forum_name": "the-key",
            "subject": "1234",
            "body": "yet another test message",
        }
    )
    bodies = users.scan(
        FilterExpression=Key("forum_name").eq("the-key"), ProjectionExpression="body"
    )["Items"]
    assert {"body": "some test message"} in bodies
    assert {"body": "yet another test message"} in bodies

    # The projection expression should not remove data from storage
    stored = users.query(KeyConditionExpression=Key("forum_name").eq("the-key"))
    assert "subject" in stored["Items"][0]
    assert "body" in stored["Items"][1]
    assert "forum_name" in stored["Items"][1]
@mock_dynamodb2
def test_nested_projection_expression_using_scan():
    """Nested-path projections on scan return only the requested
    sub-attributes and do not mutate the stored item."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Single hash-key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    full_item = {
        "forum_name": "key1",
        "nested": {
            "level1": {"id": "id1", "att": "irrelevant"},
            "level2": {"id": "id2", "include": "all"},
            "level3": {"id": "irrelevant"},
        },
        "foo": "bar",
    }
    users.put_item(Item=full_item)
    users.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Only the projected nested paths come back.
    projected = users.scan(
        FilterExpression=Key("forum_name").eq("key1"),
        ProjectionExpression="nested.level1.id, nested.level2",
    )["Items"]
    projected.should.equal(
        [
            {
                "nested": {
                    "level1": {"id": "id1"},
                    "level2": {"include": "all", "id": "id2"},
                }
            }
        ]
    )
    # Assert original data is still there
    users.scan(FilterExpression=Key("forum_name").eq("key1"))["Items"].should.equal(
        [full_item]
    )
@mock_dynamodb2
def test_basic_projection_expression_using_get_item_with_attr_expression_names():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
@ -658,6 +960,121 @@ def test_basic_projection_expressions_using_query_with_attr_expression_names():
assert results["Items"][0]["attachment"] == "something"
@mock_dynamodb2
def test_nested_projection_expression_using_get_item_with_attr_expression():
    """Nested projection paths built from ExpressionAttributeNames work on
    get_item and do not mutate the stored item."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Single hash-key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    full_item = {
        "forum_name": "key1",
        "nested": {
            "level1": {"id": "id1", "att": "irrelevant"},
            "level2": {"id": "id2", "include": "all"},
            "level3": {"id": "irrelevant"},
        },
        "foo": "bar",
    }
    users.put_item(Item=full_item)
    users.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Placeholders #nst / #lvl2 resolve to "nested" / "level2".
    projected = users.get_item(
        Key={"forum_name": "key1"},
        ProjectionExpression="#nst.level1.id, #nst.#lvl2",
        ExpressionAttributeNames={"#nst": "nested", "#lvl2": "level2"},
    )["Item"]
    projected.should.equal(
        {"nested": {"level1": {"id": "id1"}, "level2": {"id": "id2", "include": "all"}}}
    )
    # Assert actual data has not been deleted
    users.get_item(Key={"forum_name": "key1"})["Item"].should.equal(full_item)
@mock_dynamodb2
def test_nested_projection_expression_using_query_with_attr_expression_names():
    """Nested projection paths built from ExpressionAttributeNames work on
    query and do not mutate the stored item."""
    ddb = boto3.resource("dynamodb", region_name="us-east-1")
    # Single hash-key table.
    ddb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    users = ddb.Table("users")
    full_item = {
        "forum_name": "key1",
        "nested": {
            "level1": {"id": "id1", "att": "irrelevant"},
            "level2": {"id": "id2", "include": "all"},
            "level3": {"id": "irrelevant"},
        },
        "foo": "bar",
    }
    users.put_item(Item=full_item)
    users.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Placeholders #nst / #lvl2 resolve to "nested" / "level2"; "foo" is dropped.
    projected = users.query(
        KeyConditionExpression=Key("forum_name").eq("key1"),
        ProjectionExpression="#nst.level1.id, #nst.#lvl2",
        ExpressionAttributeNames={"#nst": "nested", "#lvl2": "level2"},
    )["Items"][0]
    assert "nested" in projected
    projected["nested"].should.equal(
        {"level1": {"id": "id1"}, "level2": {"id": "id2", "include": "all"}}
    )
    assert "foo" not in projected
    # Assert actual data has not been deleted
    users.query(KeyConditionExpression=Key("forum_name").eq("key1"))["Items"][
        0
    ].should.equal(full_item)
@mock_dynamodb2
def test_basic_projection_expressions_using_scan_with_attr_expression_names():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
@ -719,6 +1136,70 @@ def test_basic_projection_expressions_using_scan_with_attr_expression_names():
assert "form_name" not in results["Items"][0]
@mock_dynamodb2
def test_nested_projection_expression_using_scan_with_attr_expression_names():
    """Scan with a nested ProjectionExpression that uses expression
    attribute names returns only the projected nested paths, without
    mutating the stored items."""
    dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
    # Create the DynamoDB table.
    dynamodb.create_table(
        TableName="users",
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    table = dynamodb.Table("users")
    table.put_item(
        Item={
            "forum_name": "key1",
            "nested": {
                "level1": {"id": "id1", "att": "irrelevant"},
                "level2": {"id": "id2", "include": "all"},
                "level3": {"id": "irrelevant"},
            },
            "foo": "bar",
        }
    )
    table.put_item(
        Item={
            "forum_name": "key2",
            "nested": {"id": "id2", "incode": "code2"},
            "foo": "bar",
        }
    )
    # Test a scan
    results = table.scan(
        FilterExpression=Key("forum_name").eq("key1"),
        # Reference the declared aliases (#nst/#lvl2) in the expression —
        # DynamoDB rejects requests whose ExpressionAttributeNames contain
        # entries that the expressions never use, and this matches the
        # companion query test above.
        ProjectionExpression="#nst.level1.id, #nst.#lvl2",
        ExpressionAttributeNames={"#nst": "nested", "#lvl2": "level2"},
    )["Items"]
    results.should.equal(
        [
            {
                "nested": {
                    "level1": {"id": "id1"},
                    "level2": {"include": "all", "id": "id2"},
                }
            }
        ]
    )
    # Assert original data is still there
    results = table.scan(FilterExpression=Key("forum_name").eq("key1"))["Items"]
    results.should.equal(
        [
            {
                "forum_name": "key1",
                "foo": "bar",
                "nested": {
                    "level1": {"att": "irrelevant", "id": "id1"},
                    "level2": {"include": "all", "id": "id2"},
                    "level3": {"id": "irrelevant"},
                },
            }
        ]
    )
@mock_dynamodb2
def test_put_item_returns_consumed_capacity():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")

View File

@ -2195,3 +2195,110 @@ def test_list_open_id_connect_providers():
sorted(response["OpenIDConnectProviderList"], key=lambda i: i["Arn"]).should.equal(
[{"Arn": open_id_arn_1}, {"Arn": open_id_arn_2}, {"Arn": open_id_arn_3}]
)
@mock_iam
def test_update_account_password_policy():
    """Updating the password policy with no arguments installs the defaults."""
    conn = boto3.client("iam", region_name="us-east-1")

    conn.update_account_password_policy()

    expected_defaults = {
        "AllowUsersToChangePassword": False,
        "ExpirePasswords": False,
        "MinimumPasswordLength": 6,
        "RequireLowercaseCharacters": False,
        "RequireNumbers": False,
        "RequireSymbols": False,
        "RequireUppercaseCharacters": False,
    }
    policy = conn.get_account_password_policy()["PasswordPolicy"]
    policy.should.equal(expected_defaults)
@mock_iam
def test_update_account_password_policy_errors():
    """Out-of-range values are reported together in a single validation error."""
    conn = boto3.client("iam", region_name="us-east-1")

    expected_message = (
        "3 validation errors detected: "
        'Value "129" at "minimumPasswordLength" failed to satisfy constraint: '
        "Member must have value less than or equal to 128; "
        'Value "25" at "passwordReusePrevention" failed to satisfy constraint: '
        "Member must have value less than or equal to 24; "
        'Value "1096" at "maxPasswordAge" failed to satisfy constraint: '
        "Member must have value less than or equal to 1095"
    )
    conn.update_account_password_policy.when.called_with(
        MaxPasswordAge=1096, MinimumPasswordLength=129, PasswordReusePrevention=25
    ).should.throw(ClientError, expected_message)
@mock_iam
def test_get_account_password_policy():
    """Every configured policy field round-trips through get_account_password_policy."""
    conn = boto3.client("iam", region_name="us-east-1")
    settings = dict(
        AllowUsersToChangePassword=True,
        HardExpiry=True,
        MaxPasswordAge=60,
        MinimumPasswordLength=10,
        PasswordReusePrevention=3,
        RequireLowercaseCharacters=True,
        RequireNumbers=True,
        RequireSymbols=True,
        RequireUppercaseCharacters=True,
    )
    conn.update_account_password_policy(**settings)

    policy = conn.get_account_password_policy()["PasswordPolicy"]
    # The response echoes the settings plus ExpirePasswords=True
    # (a MaxPasswordAge was configured, so passwords expire).
    policy.should.equal(dict(settings, ExpirePasswords=True))
@mock_iam
def test_get_account_password_policy_errors():
    """Fetching a password policy that was never created raises a ClientError."""
    conn = boto3.client("iam", region_name="us-east-1")

    fetch = conn.get_account_password_policy.when.called_with()
    fetch.should.throw(
        ClientError,
        "The Password Policy with domain name 123456789012 cannot be found.",
    )
@mock_iam
def test_delete_account_password_policy():
    """Deleting an existing password policy makes subsequent reads fail."""
    conn = boto3.client("iam", region_name="us-east-1")
    conn.update_account_password_policy()

    # Policy exists before the delete...
    before = conn.get_account_password_policy()
    before.should.have.key("PasswordPolicy").which.should.be.a(dict)

    conn.delete_account_password_policy()

    # ...and is gone afterwards.
    conn.get_account_password_policy.when.called_with().should.throw(
        ClientError,
        "The Password Policy with domain name 123456789012 cannot be found.",
    )
@mock_iam
def test_delete_account_password_policy_errors():
    """Deleting a password policy that does not exist raises a ClientError."""
    conn = boto3.client("iam", region_name="us-east-1")

    delete_call = conn.delete_account_password_policy.when.called_with()
    delete_call.should.throw(
        ClientError, "The account policy with name PasswordPolicy cannot be found."
    )

View File

@ -19,7 +19,10 @@ def test_subscribe_sms():
arn = resp["TopicArn"]
resp = client.subscribe(TopicArn=arn, Protocol="sms", Endpoint="+15551234567")
resp.should.contain("SubscriptionArn")
resp.should.have.key("SubscriptionArn")
resp = client.subscribe(TopicArn=arn, Protocol="sms", Endpoint="+15/55-123.4567")
resp.should.have.key("SubscriptionArn")
@mock_sns
@ -51,6 +54,18 @@ def test_subscribe_bad_sms():
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameter")
client.subscribe.when.called_with(
TopicArn=arn, Protocol="sms", Endpoint="+15--551234567"
).should.throw(ClientError, "Invalid SMS endpoint: +15--551234567")
client.subscribe.when.called_with(
TopicArn=arn, Protocol="sms", Endpoint="+15551234567."
).should.throw(ClientError, "Invalid SMS endpoint: +15551234567.")
client.subscribe.when.called_with(
TopicArn=arn, Protocol="sms", Endpoint="/+15551234567"
).should.throw(ClientError, "Invalid SMS endpoint: /+15551234567")
@mock_sns
def test_creating_subscription():

View File

@ -1,27 +1,26 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import base64
import json
import os
import time
import uuid
import boto
import boto3
import botocore.exceptions
import six
from botocore.exceptions import ClientError
from boto.exception import SQSError
from boto.sqs.message import RawMessage, Message
from freezegun import freeze_time
import base64
import json
import sure # noqa
import time
import uuid
from moto import settings, mock_sqs, mock_sqs_deprecated
from tests.helpers import requires_boto_gte
import tests.backport_assert_raises # noqa
from nose.tools import assert_raises
from boto.exception import SQSError
from boto.sqs.message import Message, RawMessage
from botocore.exceptions import ClientError
from freezegun import freeze_time
from moto import mock_sqs, mock_sqs_deprecated, settings
from nose import SkipTest
from nose.tools import assert_raises
from tests.helpers import requires_boto_gte
@mock_sqs

View File

@ -814,6 +814,85 @@ def test_put_parameter_secure_custom_kms():
response["Parameters"][0]["Type"].should.equal("SecureString")
@mock_ssm
def test_get_parameter_history():
    """Each overwrite of a String parameter adds a new version to its history."""
    client = boto3.client("ssm", region_name="us-east-1")
    name = "test"
    version_count = 3
    for version in range(version_count):
        client.put_parameter(
            Name=name,
            Description="A test parameter version %d" % version,
            Value="value-%d" % version,
            Type="String",
            Overwrite=True,
        )

    history = client.get_parameter_history(Name=name)["Parameters"]
    len(history).should.equal(version_count)
    # History is returned oldest-first; versions are 1-based.
    for index, param in enumerate(history):
        param["Name"].should.equal(name)
        param["Type"].should.equal("String")
        param["Value"].should.equal("value-%d" % index)
        param["Version"].should.equal(index + 1)
        param["Description"].should.equal("A test parameter version %d" % index)
@mock_ssm
def test_get_parameter_history_with_secure_string():
    """SecureString history decrypts values only when WithDecryption is set."""
    client = boto3.client("ssm", region_name="us-east-1")
    name = "test"
    for version in range(3):
        client.put_parameter(
            Name=name,
            Description="A test parameter version %d" % version,
            Value="value-%d" % version,
            Type="SecureString",
            Overwrite=True,
        )

    for with_decryption in (True, False):
        history = client.get_parameter_history(
            Name=name, WithDecryption=with_decryption
        )["Parameters"]
        len(history).should.equal(3)
        for index, param in enumerate(history):
            plaintext = "value-%d" % index
            # Without decryption moto returns the mock-encrypted form.
            expected_value = (
                plaintext if with_decryption else "kms:alias/aws/ssm:%s" % plaintext
            )
            param["Name"].should.equal(name)
            param["Type"].should.equal("SecureString")
            param["Value"].should.equal(expected_value)
            param["Version"].should.equal(index + 1)
            param["Description"].should.equal("A test parameter version %d" % index)
@mock_ssm
def test_get_parameter_history_missing_parameter():
    """Requesting history for an unknown parameter raises ParameterNotFound."""
    client = boto3.client("ssm", region_name="us-east-1")

    try:
        client.get_parameter_history(Name="test_noexist")
    except botocore.exceptions.ClientError as err:
        err.operation_name.should.equal("GetParameterHistory")
        err.response["Error"]["Message"].should.equal(
            "Parameter test_noexist not found."
        )
    else:
        # Reaching here means no ClientError was raised.
        raise RuntimeError("Should have failed")
@mock_ssm
def test_add_remove_list_tags_for_resource():
client = boto3.client("ssm", region_name="us-east-1")