commit 991a740b47
@@ -4,7 +4,10 @@ Moto has a [Code of Conduct](https://github.com/spulec/moto/blob/master/CODE_OF_

 ## Running the tests locally

 Moto has a Makefile which has some helpful commands for getting setup. You should be able to run `make init` to install the dependencies and then `make test` to run the tests.

+## Linting
+Run `make lint` or `black --check moto tests` to verify whether your code conforms to the guidelines.
+
 ## Is there a missing feature?

@@ -2878,15 +2878,15 @@
 - [ ] test_failover

 ## elasticbeanstalk
-0% implemented
+13% implemented
 - [ ] abort_environment_update
 - [ ] apply_environment_managed_action
 - [ ] check_dns_availability
 - [ ] compose_environments
-- [ ] create_application
+- [X] create_application
 - [ ] create_application_version
 - [ ] create_configuration_template
-- [ ] create_environment
+- [X] create_environment
 - [ ] create_platform_version
 - [ ] create_storage_location
 - [ ] delete_application
@@ -2903,13 +2903,13 @@
 - [ ] describe_environment_managed_action_history
 - [ ] describe_environment_managed_actions
 - [ ] describe_environment_resources
-- [ ] describe_environments
+- [X] describe_environments
 - [ ] describe_events
 - [ ] describe_instances_health
 - [ ] describe_platform_version
-- [ ] list_available_solution_stacks
+- [X] list_available_solution_stacks
 - [ ] list_platform_versions
-- [ ] list_tags_for_resource
+- [X] list_tags_for_resource
 - [ ] rebuild_environment
 - [ ] request_environment_info
 - [ ] restart_app_server
@@ -2921,7 +2921,7 @@
 - [ ] update_application_version
 - [ ] update_configuration_template
 - [ ] update_environment
-- [ ] update_tags_for_resource
+- [X] update_tags_for_resource
 - [ ] validate_configuration_settings

 ## elastictranscoder
@@ -3351,11 +3351,11 @@
 - [ ] update_listener

 ## glue
-4% implemented
-- [ ] batch_create_partition
+11% implemented
+- [X] batch_create_partition
 - [ ] batch_delete_connection
-- [ ] batch_delete_partition
-- [ ] batch_delete_table
+- [X] batch_delete_partition
+- [X] batch_delete_table
 - [ ] batch_delete_table_version
 - [ ] batch_get_crawlers
 - [ ] batch_get_dev_endpoints
@@ -3372,7 +3372,7 @@
 - [ ] create_dev_endpoint
 - [ ] create_job
 - [ ] create_ml_transform
-- [ ] create_partition
+- [X] create_partition
 - [ ] create_script
 - [ ] create_security_configuration
 - [X] create_table
@@ -3404,7 +3404,7 @@
 - [ ] get_crawlers
 - [ ] get_data_catalog_encryption_settings
 - [X] get_database
-- [ ] get_databases
+- [X] get_databases
 - [ ] get_dataflow_graph
 - [ ] get_dev_endpoint
 - [ ] get_dev_endpoints
@@ -3418,7 +3418,7 @@
 - [ ] get_ml_task_runs
 - [ ] get_ml_transform
 - [ ] get_ml_transforms
-- [ ] get_partition
+- [X] get_partition
 - [ ] get_partitions
 - [ ] get_plan
 - [ ] get_resource_policy
@@ -3470,8 +3470,8 @@
 - [ ] update_dev_endpoint
 - [ ] update_job
 - [ ] update_ml_transform
-- [ ] update_partition
-- [ ] update_table
+- [X] update_partition
+- [X] update_table
 - [ ] update_trigger
 - [ ] update_user_defined_function
 - [ ] update_workflow
@@ -21,6 +21,7 @@ from .datasync import mock_datasync  # noqa
 from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated  # noqa
 from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated  # noqa
 from .dynamodbstreams import mock_dynamodbstreams  # noqa
+from .elasticbeanstalk import mock_elasticbeanstalk  # noqa
 from .ec2 import mock_ec2, mock_ec2_deprecated  # noqa
 from .ec2_instance_connect import mock_ec2_instance_connect  # noqa
 from .ecr import mock_ecr, mock_ecr_deprecated  # noqa
@@ -461,6 +461,7 @@ class RestAPI(BaseModel):
         self.description = description
         self.create_date = int(time.time())
         self.api_key_source = kwargs.get("api_key_source") or "HEADER"
+        self.policy = kwargs.get("policy") or None
         self.endpoint_configuration = kwargs.get("endpoint_configuration") or {
             "types": ["EDGE"]
         }
@@ -485,6 +486,7 @@ class RestAPI(BaseModel):
             "apiKeySource": self.api_key_source,
             "endpointConfiguration": self.endpoint_configuration,
             "tags": self.tags,
+            "policy": self.policy,
         }

     def add_child(self, path, parent_id=None):
@@ -713,6 +715,7 @@ class APIGatewayBackend(BaseBackend):
         api_key_source=None,
         endpoint_configuration=None,
         tags=None,
+        policy=None,
     ):
         api_id = create_id()
         rest_api = RestAPI(
@@ -723,6 +726,7 @@ class APIGatewayBackend(BaseBackend):
             api_key_source=api_key_source,
             endpoint_configuration=endpoint_configuration,
             tags=tags,
+            policy=policy,
         )
         self.apis[api_id] = rest_api
         return rest_api
@@ -59,6 +59,7 @@ class APIGatewayResponse(BaseResponse):
             api_key_source = self._get_param("apiKeySource")
             endpoint_configuration = self._get_param("endpointConfiguration")
             tags = self._get_param("tags")
+            policy = self._get_param("policy")

             # Param validation
             if api_key_source and api_key_source not in API_KEY_SOURCES:
@@ -94,6 +95,7 @@ class APIGatewayResponse(BaseResponse):
                 api_key_source=api_key_source,
                 endpoint_configuration=endpoint_configuration,
                 tags=tags,
+                policy=policy,
             )
             return 200, {}, json.dumps(rest_api.to_dict())

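The two hunks above thread a new `policy` parameter from the request parser through `create_rest_api` and back out of `RestAPI.to_dict()`. A minimal sketch of how that surfaces through boto3 (the API name and policy document are invented for illustration):

```python
import json

import boto3
from moto import mock_apigateway


@mock_apigateway
def test_create_rest_api_with_policy():
    policy = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [
                {"Effect": "Allow", "Principal": "*", "Action": "execute-api:Invoke"}
            ],
        }
    )
    client = boto3.client("apigateway", region_name="us-east-1")
    api = client.create_rest_api(name="my-api", policy=policy)
    # The policy should round-trip through RestAPI.to_dict()
    assert client.get_rest_api(restApiId=api["id"])["policy"] == policy
```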
@@ -1006,11 +1006,11 @@ class LambdaBackend(BaseBackend):
             return True
         return False

-    def add_policy_statement(self, function_name, raw):
+    def add_permission(self, function_name, raw):
         fn = self.get_function(function_name)
         fn.policy.add_statement(raw)

-    def del_policy_statement(self, function_name, sid, revision=""):
+    def remove_permission(self, function_name, sid, revision=""):
         fn = self.get_function(function_name)
         fn.policy.del_statement(sid, revision)

@@ -146,7 +146,7 @@ class LambdaResponse(BaseResponse):
         function_name = path.split("/")[-2]
         if self.lambda_backend.get_function(function_name):
             statement = self.body
-            self.lambda_backend.add_policy_statement(function_name, statement)
+            self.lambda_backend.add_permission(function_name, statement)
             return 200, {}, json.dumps({"Statement": statement})
         else:
             return 404, {}, "{}"
@@ -166,9 +166,7 @@ class LambdaResponse(BaseResponse):
         statement_id = path.split("/")[-1].split("?")[0]
         revision = querystring.get("RevisionId", "")
         if self.lambda_backend.get_function(function_name):
-            self.lambda_backend.del_policy_statement(
-                function_name, statement_id, revision
-            )
+            self.lambda_backend.remove_permission(function_name, statement_id, revision)
             return 204, {}, "{}"
         else:
             return 404, {}, "{}"
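The rename brings the backend in line with the boto3 operations it serves. A hedged sketch (the function name, statement id, and trivial handler are invented):

```python
import io
import zipfile

import boto3
from moto import mock_lambda


def _zip_bytes():
    # Smallest possible deployment package for the mocked backend.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr(
            "lambda_function.py", "def handler(event, context):\n    return event\n"
        )
    return buf.getvalue()


@mock_lambda
def demo_permissions():
    client = boto3.client("lambda", region_name="us-east-1")
    client.create_function(
        FunctionName="func",
        Runtime="python3.7",
        Role="arn:aws:iam::123456789012:role/unused",
        Handler="lambda_function.handler",
        Code={"ZipFile": _zip_bytes()},
    )
    client.add_permission(  # served by LambdaBackend.add_permission
        FunctionName="func",
        StatementId="sid-1",
        Action="lambda:InvokeFunction",
        Principal="s3.amazonaws.com",
    )
    client.remove_permission(FunctionName="func", StatementId="sid-1")
```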
@@ -23,6 +23,7 @@ from moto.ec2 import ec2_backends
 from moto.ec2_instance_connect import ec2_instance_connect_backends
 from moto.ecr import ecr_backends
 from moto.ecs import ecs_backends
+from moto.elasticbeanstalk import eb_backends
 from moto.elb import elb_backends
 from moto.elbv2 import elbv2_backends
 from moto.emr import emr_backends
@@ -77,6 +78,7 @@ BACKENDS = {
     "ec2_instance_connect": ec2_instance_connect_backends,
     "ecr": ecr_backends,
     "ecs": ecs_backends,
+    "elasticbeanstalk": eb_backends,
     "elb": elb_backends,
     "elbv2": elbv2_backends,
     "events": events_backends,
@@ -239,8 +239,11 @@ class FakeStack(BaseModel):
         self.cross_stack_resources = cross_stack_resources or {}
         self.resource_map = self._create_resource_map()
         self.output_map = self._create_output_map()
-        self._add_stack_event("CREATE_COMPLETE")
-        self.status = "CREATE_COMPLETE"
+        if create_change_set:
+            self.status = "REVIEW_IN_PROGRESS"
+        else:
+            self.create_resources()
+            self._add_stack_event("CREATE_COMPLETE")
         self.creation_time = datetime.utcnow()

     def _create_resource_map(self):
@@ -253,7 +256,7 @@ class FakeStack(BaseModel):
             self.template_dict,
             self.cross_stack_resources,
         )
-        resource_map.create()
+        resource_map.load()
         return resource_map

     def _create_output_map(self):
@@ -326,6 +329,10 @@ class FakeStack(BaseModel):
     def exports(self):
         return self.output_map.exports

+    def create_resources(self):
+        self.resource_map.create()
+        self.status = "CREATE_COMPLETE"
+
     def update(self, template, role_arn=None, parameters=None, tags=None):
         self._add_stack_event(
             "UPDATE_IN_PROGRESS", resource_status_reason="User Initiated"
@@ -640,6 +647,7 @@ class CloudFormationBackend(BaseBackend):
         else:
             stack._add_stack_event("UPDATE_IN_PROGRESS")
             stack._add_stack_event("UPDATE_COMPLETE")
+            stack.create_resources()
         return True

     def describe_stacks(self, name_or_stack_id):
@@ -531,14 +531,16 @@ class ResourceMap(collections_abc.Mapping):
         for condition_name in self.lazy_condition_map:
             self.lazy_condition_map[condition_name]

-    def create(self):
+    def load(self):
         self.load_mapping()
         self.transform_mapping()
         self.load_parameters()
         self.load_conditions()

+    def create(self):
         # Since this is a lazy map, to create every object we just need to
         # iterate through self.
+        # Assumes that self.load() has been called before
         self.tags.update(
             {
                 "aws:cloudformation:stack-name": self.get("AWS::StackName"),
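Splitting `ResourceMap.create()` into `load()` plus `create()` is what lets a change-set stack exist without provisioning anything. A hedged sketch of the intended observable behavior (stack name, change-set name, and template are invented):

```python
import boto3
from moto import mock_cloudformation

TEMPLATE = '{"Resources": {"Topic": {"Type": "AWS::SNS::Topic"}}}'


@mock_cloudformation
def demo_change_set():
    cf = boto3.client("cloudformation", region_name="us-east-1")
    cf.create_change_set(
        StackName="my-stack",
        ChangeSetName="my-change-set",
        TemplateBody=TEMPLATE,
        ChangeSetType="CREATE",
    )
    stack = cf.describe_stacks(StackName="my-stack")["Stacks"][0]
    assert stack["StackStatus"] == "REVIEW_IN_PROGRESS"  # loaded, not created

    # Executing the change set is what triggers FakeStack.create_resources()
    cf.execute_change_set(ChangeSetName="my-change-set", StackName="my-stack")
```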
@@ -22,6 +22,14 @@ class Dimension(object):
         self.name = name
         self.value = value

+    def __eq__(self, item):
+        if isinstance(item, Dimension):
+            return self.name == item.name and self.value == item.value
+        return False
+
+    def __ne__(self, item):  # Only needed on Py2; Py3 defines it implicitly
+        return not self == item
+

 def daterange(start, stop, step=timedelta(days=1), inclusive=False):
     """
@@ -124,6 +132,17 @@ class MetricDatum(BaseModel):
             Dimension(dimension["Name"], dimension["Value"]) for dimension in dimensions
         ]

+    def filter(self, namespace, name, dimensions):
+        if namespace and namespace != self.namespace:
+            return False
+        if name and name != self.name:
+            return False
+        if dimensions and any(
+            Dimension(d["Name"], d["Value"]) not in self.dimensions for d in dimensions
+        ):
+            return False
+        return True
+

 class Dashboard(BaseModel):
     def __init__(self, name, body):
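A small illustration of the matching rules `filter()` implements, assuming the `MetricDatum` constructor order `(namespace, name, value, dimensions, timestamp)` implied by the `__init__` shown above:

```python
from moto.cloudwatch.models import MetricDatum

datum = MetricDatum(
    namespace="AWS/S3",
    name="BucketSizeBytes",
    value=150.0,
    dimensions=[{"Name": "BucketName", "Value": "my-bucket"}],
    timestamp=None,
)

# Falsy arguments act as wildcards; every requested dimension must be present.
assert datum.filter(namespace="AWS/S3", name=None, dimensions=None)
assert not datum.filter(namespace="AWS/EC2", name=None, dimensions=None)
assert not datum.filter(
    namespace=None,
    name=None,
    dimensions=[{"Name": "BucketName", "Value": "other-bucket"}],
)
```

Note the dimension check only works because of the `Dimension.__eq__` added earlier in this file.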
@@ -202,6 +221,15 @@ class CloudWatchBackend(BaseBackend):
         self.metric_data = []
         self.paged_metric_data = {}

+    # Retrieve a list of all OOTB metrics that are provided by metrics providers
+    # Computed on the fly
+    @property
+    def aws_metric_data(self):
+        md = []
+        for name, service in metric_providers.items():
+            md.extend(service.get_cloudwatch_metrics())
+        return md
+
     def put_metric_alarm(
         self,
         name,
@@ -295,6 +323,43 @@ class CloudWatchBackend(BaseBackend):
             )
         )

+    def get_metric_data(self, queries, start_time, end_time):
+        period_data = [
+            md for md in self.metric_data if start_time <= md.timestamp <= end_time
+        ]
+        results = []
+        for query in queries:
+            query_ns = query["metric_stat._metric._namespace"]
+            query_name = query["metric_stat._metric._metric_name"]
+            query_data = [
+                md
+                for md in period_data
+                if md.namespace == query_ns and md.name == query_name
+            ]
+            metric_values = [m.value for m in query_data]
+            result_vals = []
+            stat = query["metric_stat._stat"]
+            if len(metric_values) > 0:
+                if stat == "Average":
+                    result_vals.append(sum(metric_values) / len(metric_values))
+                elif stat == "Minimum":
+                    result_vals.append(min(metric_values))
+                elif stat == "Maximum":
+                    result_vals.append(max(metric_values))
+                elif stat == "Sum":
+                    result_vals.append(sum(metric_values))

+            label = query["metric_stat._metric._metric_name"] + " " + stat
+            results.append(
+                {
+                    "id": query["id"],
+                    "label": label,
+                    "vals": result_vals,
+                    "timestamps": [datetime.now() for _ in result_vals],
+                }
+            )
+        return results
+
     def get_metric_statistics(
         self, namespace, metric_name, start_time, end_time, period, stats
     ):
@@ -334,7 +399,7 @@ class CloudWatchBackend(BaseBackend):
         return data

     def get_all_metrics(self):
-        return self.metric_data
+        return self.metric_data + self.aws_metric_data

     def put_dashboard(self, name, body):
         self.dashboards[name] = Dashboard(name, body)
@@ -386,7 +451,7 @@ class CloudWatchBackend(BaseBackend):

         self.alarms[alarm_name].update_state(reason, reason_data, state_value)

-    def list_metrics(self, next_token, namespace, metric_name):
+    def list_metrics(self, next_token, namespace, metric_name, dimensions):
         if next_token:
             if next_token not in self.paged_metric_data:
                 raise RESTError(
@@ -397,15 +462,16 @@
             del self.paged_metric_data[next_token]  # Cant reuse same token twice
             return self._get_paginated(metrics)
         else:
-            metrics = self.get_filtered_metrics(metric_name, namespace)
+            metrics = self.get_filtered_metrics(metric_name, namespace, dimensions)
             return self._get_paginated(metrics)

-    def get_filtered_metrics(self, metric_name, namespace):
+    def get_filtered_metrics(self, metric_name, namespace, dimensions):
         metrics = self.get_all_metrics()
-        if namespace:
-            metrics = [md for md in metrics if md.namespace == namespace]
-        if metric_name:
-            metrics = [md for md in metrics if md.name == metric_name]
+        metrics = [
+            md
+            for md in metrics
+            if md.filter(namespace=namespace, name=metric_name, dimensions=dimensions)
+        ]
         return metrics

     def _get_paginated(self, metrics):
@@ -445,3 +511,8 @@ for region in Session().get_available_regions(
     cloudwatch_backends[region] = CloudWatchBackend()
 for region in Session().get_available_regions("cloudwatch", partition_name="aws-cn"):
     cloudwatch_backends[region] = CloudWatchBackend()
+
+# List of services that provide OOTB CW metrics
+# See the S3Backend constructor for an example
+# TODO: We might have to separate this out per region for non-global services
+metric_providers = {}
@@ -92,6 +92,18 @@ class CloudWatchResponse(BaseResponse):
         template = self.response_template(PUT_METRIC_DATA_TEMPLATE)
         return template.render()

+    @amzn_request_id
+    def get_metric_data(self):
+        start = dtparse(self._get_param("StartTime"))
+        end = dtparse(self._get_param("EndTime"))
+        queries = self._get_list_prefix("MetricDataQueries.member")
+        results = self.cloudwatch_backend.get_metric_data(
+            start_time=start, end_time=end, queries=queries
+        )
+
+        template = self.response_template(GET_METRIC_DATA_TEMPLATE)
+        return template.render(results=results)
+
     @amzn_request_id
     def get_metric_statistics(self):
         namespace = self._get_param("Namespace")
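A hedged end-to-end sketch of the new GetMetricData support (namespace, metric name, and values are invented):

```python
from datetime import datetime, timedelta

import boto3
from moto import mock_cloudwatch


@mock_cloudwatch
def demo_get_metric_data():
    cw = boto3.client("cloudwatch", region_name="us-east-1")
    cw.put_metric_data(
        Namespace="MyApp",
        MetricData=[
            {"MetricName": "Latency", "Value": 15.0, "Timestamp": datetime.utcnow()}
        ],
    )
    result = cw.get_metric_data(
        MetricDataQueries=[
            {
                "Id": "q1",
                "MetricStat": {
                    "Metric": {"Namespace": "MyApp", "MetricName": "Latency"},
                    "Period": 60,
                    "Stat": "Sum",
                },
            }
        ],
        StartTime=datetime.utcnow() - timedelta(minutes=5),
        EndTime=datetime.utcnow() + timedelta(minutes=5),
    )
    assert result["MetricDataResults"][0]["Values"] == [15.0]
```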
@@ -124,9 +136,10 @@ class CloudWatchResponse(BaseResponse):
     def list_metrics(self):
         namespace = self._get_param("Namespace")
         metric_name = self._get_param("MetricName")
+        dimensions = self._get_multi_param("Dimensions.member")
         next_token = self._get_param("NextToken")
         next_token, metrics = self.cloudwatch_backend.list_metrics(
-            next_token, namespace, metric_name
+            next_token, namespace, metric_name, dimensions
         )
         template = self.response_template(LIST_METRICS_TEMPLATE)
         return template.render(metrics=metrics, next_token=next_token)
@@ -285,6 +298,35 @@ PUT_METRIC_DATA_TEMPLATE = """<PutMetricDataResponse xmlns="http://monitoring.am
    </ResponseMetadata>
 </PutMetricDataResponse>"""

+GET_METRIC_DATA_TEMPLATE = """<GetMetricDataResponse xmlns="http://monitoring.amazonaws.com/doc/2010-08-01/">
+   <ResponseMetadata>
+       <RequestId>
+           {{ request_id }}
+       </RequestId>
+   </ResponseMetadata>
+   <GetMetricDataResult>
+       <MetricDataResults>
+           {% for result in results %}
+           <member>
+               <Id>{{ result.id }}</Id>
+               <Label>{{ result.label }}</Label>
+               <StatusCode>Complete</StatusCode>
+               <Timestamps>
+                   {% for val in result.timestamps %}
+                   <member>{{ val }}</member>
+                   {% endfor %}
+               </Timestamps>
+               <Values>
+                   {% for val in result.vals %}
+                   <member>{{ val }}</member>
+                   {% endfor %}
+               </Values>
+           </member>
+           {% endfor %}
+       </MetricDataResults>
+   </GetMetricDataResult>
+</GetMetricDataResponse>"""
+
 GET_METRIC_STATISTICS_TEMPLATE = """<GetMetricStatisticsResponse xmlns="http://monitoring.amazonaws.com/doc/2010-08-01/">
 <ResponseMetadata>
   <RequestId>
@@ -342,7 +384,7 @@ LIST_METRICS_TEMPLATE = """<ListMetricsResponse xmlns="http://monitoring.amazona
                 </member>
                 {% endfor %}
             </Dimensions>
-            <MetricName>{{ metric.name }}</MetricName>
+            <MetricName>Metric:{{ metric.name }}</MetricName>
             <Namespace>{{ metric.namespace }}</Namespace>
         </member>
         {% endfor %}
@@ -1,5 +1,5 @@
-from moto.core.utils import get_random_hex
+from uuid import uuid4


 def get_random_identity_id(region):
-    return "{0}:{1}".format(region, get_random_hex(length=19))
+    return "{0}:{1}".format(region, uuid4())
@@ -12,6 +12,8 @@ from io import BytesIO
 from collections import defaultdict
 from botocore.handlers import BUILTIN_HANDLERS
 from botocore.awsrequest import AWSResponse
+from six.moves.urllib.parse import urlparse
+from werkzeug.wrappers import Request

 import mock
 from moto import settings
@@ -175,6 +177,26 @@ class CallbackResponse(responses.CallbackResponse):
         """
         Need to override this so we can pass decode_content=False
         """
+        if not isinstance(request, Request):
+            url = urlparse(request.url)
+            if request.body is None:
+                body = None
+            elif isinstance(request.body, six.text_type):
+                body = six.BytesIO(six.b(request.body))
+            else:
+                body = six.BytesIO(request.body)
+            req = Request.from_values(
+                path="?".join([url.path, url.query]),
+                input_stream=body,
+                content_length=request.headers.get("Content-Length"),
+                content_type=request.headers.get("Content-Type"),
+                method=request.method,
+                base_url="{scheme}://{netloc}".format(
+                    scheme=url.scheme, netloc=url.netloc
+                ),
+                headers=[(k, v) for k, v in six.iteritems(request.headers)],
+            )
+            request = req
         headers = self.get_headers()

         result = self.callback(request)
@@ -328,3 +328,25 @@ def py2_strip_unicode_keys(blob):
         blob = new_set

     return blob
+
+
+def tags_from_query_string(
+    querystring_dict, prefix="Tag", key_suffix="Key", value_suffix="Value"
+):
+    response_values = {}
+    for key, value in querystring_dict.items():
+        if key.startswith(prefix) and key.endswith(key_suffix):
+            tag_index = key.replace(prefix + ".", "").replace("." + key_suffix, "")
+            tag_key = querystring_dict.get(
+                "{prefix}.{index}.{key_suffix}".format(
+                    prefix=prefix, index=tag_index, key_suffix=key_suffix,
+                )
+            )[0]
+            tag_value_key = "{prefix}.{index}.{value_suffix}".format(
+                prefix=prefix, index=tag_index, value_suffix=value_suffix,
+            )
+            if tag_value_key in querystring_dict:
+                response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
+            else:
+                response_values[tag_key] = None
+    return response_values
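This consolidates the near-identical helpers that `moto/ec2/utils.py` and `moto/emr/utils.py` used to carry (both removed further down). Its behavior on the flattened query-string form, derived directly from the code above:

```python
from moto.core.utils import tags_from_query_string

# Values are single-element lists, as produced by moto's querystring parsing.
qs = {
    "Tag.1.Key": ["env"],
    "Tag.1.Value": ["prod"],
    "Tag.2.Key": ["team"],
    "Tag.2.Value": ["infra"],
}
assert tags_from_query_string(qs) == {"env": "prod", "team": "infra"}

# The EMR call site only differs in the prefix:
emr_qs = {"Tags.1.Key": ["env"], "Tags.1.Value": ["prod"]}
assert tags_from_query_string(emr_qs, prefix="Tags") == {"env": "prod"}
```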
@@ -74,6 +74,9 @@ class Item(BaseModel):
     def __repr__(self):
         return "Item: {0}".format(self.to_json())

+    def size(self):
+        return sum(bytesize(key) + value.size() for key, value in self.attrs.items())
+
     def to_json(self):
         attributes = {}
         for attribute_key, attribute in self.attrs.items():
@@ -921,6 +924,14 @@ class Table(BaseModel):
                 break

         last_evaluated_key = None
+        size_limit = 1000000  # DynamoDB has a 1MB size limit
+        item_size = sum(res.size() for res in results)
+        if item_size > size_limit:
+            item_size = idx = 0
+            while item_size + results[idx].size() < size_limit:
+                item_size += results[idx].size()
+                idx += 1
+            limit = min(limit, idx) if limit else idx
         if limit and len(results) > limit:
             results = results[:limit]
             last_evaluated_key = {self.hash_key_attr: results[-1].hash_key}
@@ -92,16 +92,24 @@ class DynamoHandler(BaseResponse):
     def list_tables(self):
         body = self.body
         limit = body.get("Limit", 100)
-        if body.get("ExclusiveStartTableName"):
-            last = body.get("ExclusiveStartTableName")
-            start = list(self.dynamodb_backend.tables.keys()).index(last) + 1
+        all_tables = list(self.dynamodb_backend.tables.keys())
+
+        exclusive_start_table_name = body.get("ExclusiveStartTableName")
+        if exclusive_start_table_name:
+            try:
+                last_table_index = all_tables.index(exclusive_start_table_name)
+            except ValueError:
+                start = len(all_tables)
+            else:
+                start = last_table_index + 1
         else:
             start = 0
-        all_tables = list(self.dynamodb_backend.tables.keys())
+
         if limit:
             tables = all_tables[start : start + limit]
         else:
             tables = all_tables[start:]
+
         response = {"TableNames": tables}
         if limit and len(all_tables) > start + limit:
             response["LastEvaluatedTableName"] = tables[-1]
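Sketch of the behavior change (table names invented): pagination still works, and an `ExclusiveStartTableName` that no longer exists now yields an empty page instead of an unhandled `ValueError`:

```python
import boto3
from moto import mock_dynamodb2


@mock_dynamodb2
def demo_list_tables():
    client = boto3.client("dynamodb", region_name="us-east-1")
    for name in ("table-a", "table-b", "table-c"):
        client.create_table(
            TableName=name,
            KeySchema=[{"AttributeName": "pk", "KeyType": "HASH"}],
            AttributeDefinitions=[{"AttributeName": "pk", "AttributeType": "S"}],
            ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
        )
    first = client.list_tables(Limit=2)  # TableNames == ["table-a", "table-b"]
    second = client.list_tables(
        Limit=2, ExclusiveStartTableName=first["LastEvaluatedTableName"]
    )
    assert second["TableNames"] == ["table-c"]
    # Unknown start table: empty page rather than a crash.
    assert client.list_tables(ExclusiveStartTableName="gone")["TableNames"] == []
```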
@@ -462,8 +470,10 @@ class DynamoHandler(BaseResponse):
             for k, v in six.iteritems(self.body.get("ExpressionAttributeNames", {}))
         )

-        if " AND " in key_condition_expression:
-            expressions = key_condition_expression.split(" AND ", 1)
+        if " and " in key_condition_expression.lower():
+            expressions = re.split(
+                " AND ", key_condition_expression, maxsplit=1, flags=re.IGNORECASE
+            )

         index_hash_key = [key for key in index if key["KeyType"] == "HASH"][0]
         hash_key_var = reverse_attribute_lookup.get(
@@ -557,6 +557,10 @@ class Instance(TaggedEC2Resource, BotoInstance):
             # worst case we'll get IP address exaustion... rarely
             pass

+    def add_block_device(self, size, device_path):
+        volume = self.ec2_backend.create_volume(size, self.region_name)
+        self.ec2_backend.attach_volume(volume.id, self.id, device_path)
+
     def setup_defaults(self):
         # Default have an instance with root volume should you not wish to
         # override with attach volume cmd.
@@ -564,9 +568,10 @@ class Instance(TaggedEC2Resource, BotoInstance):
         self.ec2_backend.attach_volume(volume.id, self.id, "/dev/sda1")

     def teardown_defaults(self):
-        volume_id = self.block_device_mapping["/dev/sda1"].volume_id
-        self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1")
-        self.ec2_backend.delete_volume(volume_id)
+        if "/dev/sda1" in self.block_device_mapping:
+            volume_id = self.block_device_mapping["/dev/sda1"].volume_id
+            self.ec2_backend.detach_volume(volume_id, self.id, "/dev/sda1")
+            self.ec2_backend.delete_volume(volume_id)

     @property
     def get_block_device_mapping(self):
@@ -621,6 +626,7 @@ class Instance(TaggedEC2Resource, BotoInstance):
             subnet_id=properties.get("SubnetId"),
             key_name=properties.get("KeyName"),
             private_ip=properties.get("PrivateIpAddress"),
+            block_device_mappings=properties.get("BlockDeviceMappings", {}),
         )
         instance = reservation.instances[0]
         for tag in properties.get("Tags", []):
@@ -880,7 +886,14 @@ class InstanceBackend(object):
             )
             new_reservation.instances.append(new_instance)
             new_instance.add_tags(instance_tags)
-            new_instance.setup_defaults()
+            if "block_device_mappings" in kwargs:
+                for block_device in kwargs["block_device_mappings"]:
+                    new_instance.add_block_device(
+                        block_device["Ebs"]["VolumeSize"], block_device["DeviceName"]
+                    )
+            else:
+                new_instance.setup_defaults()
+
         return new_reservation

     def start_instances(self, instance_ids):
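The CloudFormation hunk above forwards `BlockDeviceMappings` into this code path; whether the plain RunInstances API wires it through depends on response-layer changes not visible here. A hedged sketch via a template (names and sizes invented):

```python
import json

import boto3
from moto import mock_cloudformation, mock_ec2

TEMPLATE = json.dumps(
    {
        "Resources": {
            "Server": {
                "Type": "AWS::EC2::Instance",
                "Properties": {
                    "ImageId": "ami-12345678",
                    "BlockDeviceMappings": [
                        {"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": 50}}
                    ],
                },
            }
        }
    }
)


@mock_ec2
@mock_cloudformation
def demo_block_devices():
    boto3.client("cloudformation", region_name="us-east-1").create_stack(
        StackName="stack", TemplateBody=TEMPLATE
    )
    ec2 = boto3.client("ec2", region_name="us-east-1")
    sizes = [v["Size"] for v in ec2.describe_volumes()["Volumes"]]
    assert 50 in sizes  # the 50 GiB volume created by add_block_device
```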
@@ -1512,6 +1525,11 @@ class RegionsAndZonesBackend(object):
         regions.append(Region(region, "ec2.{}.amazonaws.com.cn".format(region)))

     zones = {
+        "af-south-1": [
+            Zone(region_name="af-south-1", name="af-south-1a", zone_id="afs1-az1"),
+            Zone(region_name="af-south-1", name="af-south-1b", zone_id="afs1-az2"),
+            Zone(region_name="af-south-1", name="af-south-1c", zone_id="afs1-az3"),
+        ],
         "ap-south-1": [
             Zone(region_name="ap-south-1", name="ap-south-1a", zone_id="aps1-az1"),
             Zone(region_name="ap-south-1", name="ap-south-1b", zone_id="aps1-az3"),
@@ -52,7 +52,7 @@ class InstanceResponse(BaseResponse):
         private_ip = self._get_param("PrivateIpAddress")
         associate_public_ip = self._get_param("AssociatePublicIpAddress")
         key_name = self._get_param("KeyName")
-        ebs_optimized = self._get_param("EbsOptimized")
+        ebs_optimized = self._get_param("EbsOptimized") or False
         instance_initiated_shutdown_behavior = self._get_param(
             "InstanceInitiatedShutdownBehavior"
         )
@@ -2,7 +2,8 @@ from __future__ import unicode_literals

 from moto.core.responses import BaseResponse
 from moto.ec2.models import validate_resource_ids
-from moto.ec2.utils import tags_from_query_string, filters_from_querystring
+from moto.ec2.utils import filters_from_querystring
+from moto.core.utils import tags_from_query_string


 class TagResponse(BaseResponse):
@@ -196,22 +196,6 @@ def split_route_id(route_id):
     return values[0], values[1]


-def tags_from_query_string(querystring_dict):
-    prefix = "Tag"
-    suffix = "Key"
-    response_values = {}
-    for key, value in querystring_dict.items():
-        if key.startswith(prefix) and key.endswith(suffix):
-            tag_index = key.replace(prefix + ".", "").replace("." + suffix, "")
-            tag_key = querystring_dict.get("Tag.{0}.Key".format(tag_index))[0]
-            tag_value_key = "Tag.{0}.Value".format(tag_index)
-            if tag_value_key in querystring_dict:
-                response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
-            else:
-                response_values[tag_key] = None
-    return response_values
-
-
 def dhcp_configuration_from_querystring(querystring, option="DhcpConfiguration"):
     """
     turn:
moto/elasticbeanstalk/__init__.py (new file)
@@ -0,0 +1,4 @@
+from .models import eb_backends
+from moto.core.models import base_decorator
+
+mock_elasticbeanstalk = base_decorator(eb_backends)
moto/elasticbeanstalk/exceptions.py (new file)
@@ -0,0 +1,15 @@
+from moto.core.exceptions import RESTError
+
+
+class InvalidParameterValueError(RESTError):
+    def __init__(self, message):
+        super(InvalidParameterValueError, self).__init__(
+            "InvalidParameterValue", message
+        )
+
+
+class ResourceNotFoundException(RESTError):
+    def __init__(self, message):
+        super(ResourceNotFoundException, self).__init__(
+            "ResourceNotFoundException", message
+        )
moto/elasticbeanstalk/models.py (new file)
@@ -0,0 +1,152 @@
+import weakref
+
+from boto3 import Session
+
+from moto.core import BaseBackend, BaseModel
+from .exceptions import InvalidParameterValueError, ResourceNotFoundException
+
+
+class FakeEnvironment(BaseModel):
+    def __init__(
+        self, application, environment_name, solution_stack_name, tags,
+    ):
+        self.application = weakref.proxy(
+            application
+        )  # weakref to break circular dependencies
+        self.environment_name = environment_name
+        self.solution_stack_name = solution_stack_name
+        self.tags = tags
+
+    @property
+    def application_name(self):
+        return self.application.application_name
+
+    @property
+    def environment_arn(self):
+        return (
+            "arn:aws:elasticbeanstalk:{region}:{account_id}:"
+            "environment/{application_name}/{environment_name}".format(
+                region=self.region,
+                account_id="123456789012",
+                application_name=self.application_name,
+                environment_name=self.environment_name,
+            )
+        )
+
+    @property
+    def platform_arn(self):
+        return "TODO"  # TODO
+
+    @property
+    def region(self):
+        return self.application.region
+
+
+class FakeApplication(BaseModel):
+    def __init__(self, backend, application_name):
+        self.backend = weakref.proxy(backend)  # weakref to break cycles
+        self.application_name = application_name
+        self.environments = dict()
+
+    def create_environment(
+        self, environment_name, solution_stack_name, tags,
+    ):
+        if environment_name in self.environments:
+            raise InvalidParameterValueError
+
+        env = FakeEnvironment(
+            application=self,
+            environment_name=environment_name,
+            solution_stack_name=solution_stack_name,
+            tags=tags,
+        )
+        self.environments[environment_name] = env
+
+        return env
+
+    @property
+    def region(self):
+        return self.backend.region
+
+
+class EBBackend(BaseBackend):
+    def __init__(self, region):
+        self.region = region
+        self.applications = dict()
+
+    def reset(self):
+        # preserve region
+        region = self.region
+        self._reset_model_refs()
+        self.__dict__ = {}
+        self.__init__(region)
+
+    def create_application(self, application_name):
+        if application_name in self.applications:
+            raise InvalidParameterValueError(
+                "Application {} already exists.".format(application_name)
+            )
+        new_app = FakeApplication(backend=self, application_name=application_name,)
+        self.applications[application_name] = new_app
+        return new_app
+
+    def create_environment(self, app, environment_name, stack_name, tags):
+        return app.create_environment(
+            environment_name=environment_name,
+            solution_stack_name=stack_name,
+            tags=tags,
+        )
+
+    def describe_environments(self):
+        envs = []
+        for app in self.applications.values():
+            for env in app.environments.values():
+                envs.append(env)
+        return envs
+
+    def list_available_solution_stacks(self):
+        # Implemented in response.py
+        pass
+
+    def update_tags_for_resource(self, resource_arn, tags_to_add, tags_to_remove):
+        try:
+            res = self._find_environment_by_arn(resource_arn)
+        except KeyError:
+            raise ResourceNotFoundException(
+                "Resource not found for ARN '{}'.".format(resource_arn)
+            )
+
+        for key, value in tags_to_add.items():
+            res.tags[key] = value
+
+        for key in tags_to_remove:
+            del res.tags[key]
+
+    def list_tags_for_resource(self, resource_arn):
+        try:
+            res = self._find_environment_by_arn(resource_arn)
+        except KeyError:
+            raise ResourceNotFoundException(
+                "Resource not found for ARN '{}'.".format(resource_arn)
+            )
+        return res.tags
+
+    def _find_environment_by_arn(self, arn):
+        for app in self.applications.keys():
+            for env in self.applications[app].environments.values():
+                if env.environment_arn == arn:
+                    return env
+        raise KeyError()
+
+
+eb_backends = {}
+for region in Session().get_available_regions("elasticbeanstalk"):
+    eb_backends[region] = EBBackend(region)
+for region in Session().get_available_regions(
+    "elasticbeanstalk", partition_name="aws-us-gov"
+):
+    eb_backends[region] = EBBackend(region)
+for region in Session().get_available_regions(
+    "elasticbeanstalk", partition_name="aws-cn"
+):
+    eb_backends[region] = EBBackend(region)
moto/elasticbeanstalk/responses.py (new file, 1386 lines)
File diff suppressed because it is too large.
moto/elasticbeanstalk/urls.py (new file)
@@ -0,0 +1,11 @@
+from __future__ import unicode_literals
+
+from .responses import EBResponse
+
+url_bases = [
+    r"https?://elasticbeanstalk.(?P<region>[a-zA-Z0-9\-_]+).amazonaws.com",
+]
+
+url_paths = {
+    "{0}/$": EBResponse.dispatch,
+}
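Putting the new service together, a hedged usage sketch; the responses layer (suppressed above) is what surfaces fields like `EnvironmentArn` and `ResourceTags`:

```python
import boto3
from moto import mock_elasticbeanstalk


@mock_elasticbeanstalk
def demo_elasticbeanstalk():
    eb = boto3.client("elasticbeanstalk", region_name="us-east-1")
    eb.create_application(ApplicationName="myapp")
    env = eb.create_environment(ApplicationName="myapp", EnvironmentName="myenv")
    arn = env["EnvironmentArn"]
    eb.update_tags_for_resource(
        ResourceArn=arn, TagsToAdd=[{"Key": "env", "Value": "prod"}]
    )
    tags = eb.list_tags_for_resource(ResourceArn=arn)["ResourceTags"]
    assert tags == [{"Key": "env", "Value": "prod"}]
```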
@@ -10,9 +10,10 @@ from six.moves.urllib.parse import urlparse
 from moto.core.responses import AWSServiceSpec
 from moto.core.responses import BaseResponse
 from moto.core.responses import xml_to_json_response
+from moto.core.utils import tags_from_query_string
 from .exceptions import EmrError
 from .models import emr_backends
-from .utils import steps_from_query_string, tags_from_query_string
+from .utils import steps_from_query_string


 def generate_boto3_response(operation):
@@ -91,7 +92,7 @@ class ElasticMapReduceResponse(BaseResponse):
     @generate_boto3_response("AddTags")
     def add_tags(self):
         cluster_id = self._get_param("ResourceId")
-        tags = tags_from_query_string(self.querystring)
+        tags = tags_from_query_string(self.querystring, prefix="Tags")
         self.backend.add_tags(cluster_id, tags)
         template = self.response_template(ADD_TAGS_TEMPLATE)
         return template.render()
@@ -22,22 +22,6 @@ def random_instance_group_id(size=13):
     return "i-{0}".format(random_id())


-def tags_from_query_string(querystring_dict):
-    prefix = "Tags"
-    suffix = "Key"
-    response_values = {}
-    for key, value in querystring_dict.items():
-        if key.startswith(prefix) and key.endswith(suffix):
-            tag_index = key.replace(prefix + ".", "").replace("." + suffix, "")
-            tag_key = querystring_dict.get("Tags.{0}.Key".format(tag_index))[0]
-            tag_value_key = "Tags.{0}.Value".format(tag_index)
-            if tag_value_key in querystring_dict:
-                response_values[tag_key] = querystring_dict.get(tag_value_key)[0]
-            else:
-                response_values[tag_key] = None
-    return response_values
-
-
 def steps_from_query_string(querystring_dict):
     steps = []
     for step in querystring_dict:
@@ -34,6 +34,9 @@ class GlueBackend(BaseBackend):
         except KeyError:
             raise DatabaseNotFoundException(database_name)

+    def get_databases(self):
+        return [self.databases[key] for key in self.databases] if self.databases else []
+
     def create_table(self, database_name, table_name, table_input):
         database = self.get_database(database_name)

@@ -30,6 +30,12 @@ class GlueResponse(BaseResponse):
         database = self.glue_backend.get_database(database_name)
         return json.dumps({"Database": {"Name": database.name}})

+    def get_databases(self):
+        database_list = self.glue_backend.get_databases()
+        return json.dumps(
+            {"DatabaseList": [{"Name": database.name} for database in database_list]}
+        )
+
     def create_table(self):
         database_name = self.parameters.get("DatabaseName")
         table_input = self.parameters.get("TableInput")
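Usage sketch for the new GetDatabases operation (database names invented):

```python
import boto3
from moto import mock_glue


@mock_glue
def demo_get_databases():
    glue = boto3.client("glue", region_name="us-east-1")
    glue.create_database(DatabaseInput={"Name": "db1"})
    glue.create_database(DatabaseInput={"Name": "db2"})
    names = [db["Name"] for db in glue.get_databases()["DatabaseList"]]
    assert names == ["db1", "db2"]
```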
@@ -145,10 +145,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
         # Do S3, resource type s3
         if not resource_type_filters or "s3" in resource_type_filters:
             for bucket in self.s3_backend.buckets.values():
-                tags = []
-                for tag in bucket.tags.tag_set.tags:
-                    tags.append({"Key": tag.key, "Value": tag.value})
+                tags = self.s3_backend.tagger.list_tags_for_resource(bucket.arn)["Tags"]

                 if not tags or not tag_filter(
                     tags
                 ):  # Skip if no tags, or invalid filter
@@ -362,8 +359,9 @@

         # Do S3, resource type s3
         for bucket in self.s3_backend.buckets.values():
-            for tag in bucket.tags.tag_set.tags:
-                yield tag.key
+            tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn)
+            for key, _ in tags.items():
+                yield key

         # EC2 tags
         def get_ec2_keys(res_id):
@@ -414,9 +412,10 @@

         # Do S3, resource type s3
         for bucket in self.s3_backend.buckets.values():
-            for tag in bucket.tags.tag_set.tags:
-                if tag.key == tag_key:
-                    yield tag.value
+            tags = self.s3_backend.tagger.get_tag_dict_for_resource(bucket.arn)
+            for key, value in tags.items():
+                if key == tag_key:
+                    yield value

         # EC2 tags
         def get_ec2_values(res_id):
@@ -22,6 +22,8 @@ import six
 from bisect import insort
 from moto.core import ACCOUNT_ID, BaseBackend, BaseModel
 from moto.core.utils import iso_8601_datetime_with_milliseconds, rfc_1123_datetime
+from moto.cloudwatch.models import metric_providers, MetricDatum
+from moto.utilities.tagging_service import TaggingService
 from .exceptions import (
     BucketAlreadyExists,
     MissingBucket,
@@ -34,7 +36,6 @@ from .exceptions import (
     MalformedXML,
     InvalidStorageClass,
     InvalidTargetBucketForLogging,
-    DuplicateTagKeys,
     CrossLocationLoggingProhibitted,
     NoSuchPublicAccessBlockConfiguration,
     InvalidPublicAccessBlockConfiguration,
@@ -94,6 +95,7 @@ class FakeKey(BaseModel):
         version_id=0,
         max_buffer_size=DEFAULT_KEY_BUFFER_SIZE,
         multipart=None,
+        bucket_name=None,
     ):
         self.name = name
         self.last_modified = datetime.datetime.utcnow()
@@ -105,8 +107,8 @@ class FakeKey(BaseModel):
         self._etag = etag
         self._version_id = version_id
         self._is_versioned = is_versioned
-        self._tagging = FakeTagging()
         self.multipart = multipart
+        self.bucket_name = bucket_name

         self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size)
         self._max_buffer_size = max_buffer_size
@@ -126,6 +128,13 @@ class FakeKey(BaseModel):
         self.lock.release()
         return r

+    @property
+    def arn(self):
+        # S3 Objects don't have an ARN, but we do need something unique when creating tags against this resource
+        return "arn:aws:s3:::{}/{}/{}".format(
+            self.bucket_name, self.name, self.version_id
+        )
+
     @value.setter
     def value(self, new_value):
         self._value_buffer.seek(0)
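The synthetic ARN only needs to be unique per (bucket, key, version) so the shared `TaggingService` can index object tags. A quick sketch of the format, assuming `FakeKey` is constructed with the new `bucket_name` argument:

```python
from moto.s3.models import FakeKey

key = FakeKey(name="photo.png", value=b"", bucket_name="my-bucket")
assert key.arn == "arn:aws:s3:::my-bucket/photo.png/0"  # version_id defaults to 0
```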
@@ -152,9 +161,6 @@ class FakeKey(BaseModel):
             self._metadata = {}
         self._metadata.update(metadata)

-    def set_tagging(self, tagging):
-        self._tagging = tagging
-
     def set_storage_class(self, storage):
         if storage is not None and storage not in STORAGE_CLASS:
             raise InvalidStorageClass(storage=storage)
@@ -210,10 +216,6 @@ class FakeKey(BaseModel):
     def metadata(self):
         return self._metadata

-    @property
-    def tagging(self):
-        return self._tagging
-
     @property
     def response_dict(self):
         res = {
@ -471,26 +473,10 @@ def get_canned_acl(acl):
|
|||||||
return FakeAcl(grants=grants)
|
return FakeAcl(grants=grants)
|
||||||
|
|
||||||
|
|
||||||
class FakeTagging(BaseModel):
|
|
||||||
def __init__(self, tag_set=None):
|
|
||||||
self.tag_set = tag_set or FakeTagSet()
|
|
||||||
|
|
||||||
|
|
||||||
class FakeTagSet(BaseModel):
|
|
||||||
def __init__(self, tags=None):
|
|
||||||
self.tags = tags or []
|
|
||||||
|
|
||||||
|
|
||||||
class FakeTag(BaseModel):
|
|
||||||
def __init__(self, key, value=None):
|
|
||||||
self.key = key
|
|
||||||
self.value = value
|
|
||||||
|
|
||||||
|
|
||||||
class LifecycleFilter(BaseModel):
|
class LifecycleFilter(BaseModel):
|
||||||
def __init__(self, prefix=None, tag=None, and_filter=None):
|
def __init__(self, prefix=None, tag=None, and_filter=None):
|
||||||
self.prefix = prefix
|
self.prefix = prefix
|
||||||
self.tag = tag
|
(self.tag_key, self.tag_value) = tag if tag else (None, None)
|
||||||
self.and_filter = and_filter
|
self.and_filter = and_filter
|
||||||
|
|
||||||
def to_config_dict(self):
|
def to_config_dict(self):
|
||||||
@ -499,11 +485,11 @@ class LifecycleFilter(BaseModel):
|
|||||||
"predicate": {"type": "LifecyclePrefixPredicate", "prefix": self.prefix}
|
"predicate": {"type": "LifecyclePrefixPredicate", "prefix": self.prefix}
|
||||||
}
|
}
|
||||||
|
|
||||||
elif self.tag:
|
elif self.tag_key:
|
||||||
return {
|
return {
|
||||||
"predicate": {
|
"predicate": {
|
||||||
"type": "LifecycleTagPredicate",
|
"type": "LifecycleTagPredicate",
|
||||||
"tag": {"key": self.tag.key, "value": self.tag.value},
|
"tag": {"key": self.tag_key, "value": self.tag_value},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -527,12 +513,9 @@ class LifecycleAndFilter(BaseModel):
|
|||||||
if self.prefix is not None:
|
if self.prefix is not None:
|
||||||
data.append({"type": "LifecyclePrefixPredicate", "prefix": self.prefix})
|
data.append({"type": "LifecyclePrefixPredicate", "prefix": self.prefix})
|
||||||
|
|
||||||
for tag in self.tags:
|
for key, value in self.tags.items():
|
||||||
data.append(
|
data.append(
|
||||||
{
|
{"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},}
|
||||||
"type": "LifecycleTagPredicate",
|
|
||||||
"tag": {"key": tag.key, "value": tag.value},
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
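The lifecycle filter's tag is now an ordinary `(key, value)` tuple instead of a `FakeTag` object. A quick sketch of the resulting behaviour, assuming the class exactly as defined above:

```python
# Hypothetical direct use of the reworked filter; prefix/and_filter left unset.
f = LifecycleFilter(tag=("environment", "staging"))
f.to_config_dict()
# -> {"predicate": {"type": "LifecycleTagPredicate",
#                   "tag": {"key": "environment", "value": "staging"}}}
```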
@@ -787,7 +770,6 @@ class FakeBucket(BaseModel):
         self.policy = None
         self.website_configuration = None
         self.acl = get_canned_acl("private")
-        self.tags = FakeTagging()
         self.cors = []
         self.logging = {}
         self.notification_configuration = None
@@ -879,7 +861,7 @@ class FakeBucket(BaseModel):
                 and_filter = None
                 if rule["Filter"].get("And"):
                     filters += 1
-                    and_tags = []
+                    and_tags = {}
                     if rule["Filter"]["And"].get("Tag"):
                         if not isinstance(rule["Filter"]["And"]["Tag"], list):
                             rule["Filter"]["And"]["Tag"] = [
@@ -887,7 +869,7 @@ class FakeBucket(BaseModel):
                             ]

                         for t in rule["Filter"]["And"]["Tag"]:
-                            and_tags.append(FakeTag(t["Key"], t.get("Value", "")))
+                            and_tags[t["Key"]] = t.get("Value", "")

                 try:
                     and_prefix = (
@@ -901,7 +883,7 @@ class FakeBucket(BaseModel):
                 filter_tag = None
                 if rule["Filter"].get("Tag"):
                     filters += 1
-                    filter_tag = FakeTag(
+                    filter_tag = (
                         rule["Filter"]["Tag"]["Key"],
                         rule["Filter"]["Tag"].get("Value", ""),
                     )
@@ -988,16 +970,6 @@ class FakeBucket(BaseModel):
     def delete_cors(self):
         self.cors = []

-    def set_tags(self, tagging):
-        self.tags = tagging
-
-    def delete_tags(self):
-        self.tags = FakeTagging()
-
-    @property
-    def tagging(self):
-        return self.tags
-
     def set_logging(self, logging_config, bucket_backend):
         if not logging_config:
             self.logging = {}
@@ -1085,6 +1057,10 @@ class FakeBucket(BaseModel):
     def set_acl(self, acl):
         self.acl = acl

+    @property
+    def arn(self):
+        return "arn:aws:s3:::{}".format(self.name)
+
     @property
     def physical_resource_id(self):
         return self.name
@@ -1110,7 +1086,7 @@ class FakeBucket(BaseModel):
                 int(time.mktime(self.creation_date.timetuple()))
             ),  # PY2 and 3 compatible
             "configurationItemMD5Hash": "",
-            "arn": "arn:aws:s3:::{}".format(self.name),
+            "arn": self.arn,
             "resourceType": "AWS::S3::Bucket",
             "resourceId": self.name,
             "resourceName": self.name,
@@ -1119,7 +1095,7 @@ class FakeBucket(BaseModel):
             "resourceCreationTime": str(self.creation_date),
             "relatedEvents": [],
             "relationships": [],
-            "tags": {tag.key: tag.value for tag in self.tagging.tag_set.tags},
+            "tags": s3_backend.tagger.get_tag_dict_for_resource(self.arn),
             "configuration": {
                 "name": self.name,
                 "owner": {"id": OWNER},
@@ -1181,6 +1157,40 @@ class S3Backend(BaseBackend):
     def __init__(self):
         self.buckets = {}
         self.account_public_access_block = None
+        self.tagger = TaggingService()
+
+        # Register this class as a CloudWatch Metric Provider
+        # Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available
+        metric_providers["S3"] = self
+
+    def get_cloudwatch_metrics(self):
+        metrics = []
+        for name, bucket in self.buckets.items():
+            metrics.append(
+                MetricDatum(
+                    namespace="AWS/S3",
+                    name="BucketSizeBytes",
+                    value=bucket.keys.item_size(),
+                    dimensions=[
+                        {"Name": "StorageType", "Value": "StandardStorage"},
+                        {"Name": "BucketName", "Value": name},
+                    ],
+                    timestamp=datetime.datetime.now(),
+                )
+            )
+            metrics.append(
+                MetricDatum(
+                    namespace="AWS/S3",
+                    name="NumberOfObjects",
+                    value=len(bucket.keys),
+                    dimensions=[
+                        {"Name": "StorageType", "Value": "AllStorageTypes"},
+                        {"Name": "BucketName", "Value": name},
+                    ],
+                    timestamp=datetime.datetime.now(),
+                )
+            )
+        return metrics

     def create_bucket(self, bucket_name, region_name):
         if bucket_name in self.buckets:
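The two comments above are the whole contract: anything stored in `metric_providers` must expose a `get_cloudwatch_metrics()` method, and the CloudWatch mock pulls from every registered provider when metrics are listed. A self-contained sketch of that provider-registry pattern (all names here are illustrative, not moto's exact internals):

```python
import datetime

# A shared registry, keyed by service name.
metric_providers = {}


class FakeMetric:
    def __init__(self, namespace, name, value, timestamp):
        self.namespace, self.name, self.value, self.timestamp = (
            namespace, name, value, timestamp,
        )


class TinyS3Backend:
    def __init__(self):
        self.buckets = {"my-bucket": [b"abc", b"defg"]}
        metric_providers["S3"] = self  # register at construction time

    def get_cloudwatch_metrics(self):
        # One datum per bucket, derived from whatever state the backend holds.
        return [
            FakeMetric("AWS/S3", "NumberOfObjects", len(keys), datetime.datetime.now())
            for keys in self.buckets.values()
        ]


TinyS3Backend()
# The CloudWatch side simply flattens whatever every provider reports:
all_metrics = [m for p in metric_providers.values() for m in p.get_cloudwatch_metrics()]
assert all_metrics[0].name == "NumberOfObjects"
```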
@@ -1350,23 +1360,32 @@ class S3Backend(BaseBackend):
         else:
             return None

-    def set_key_tagging(self, bucket_name, key_name, tagging, version_id=None):
-        key = self.get_key(bucket_name, key_name, version_id)
+    def get_key_tags(self, key):
+        return self.tagger.list_tags_for_resource(key.arn)
+
+    def set_key_tags(self, key, tags, key_name=None):
         if key is None:
             raise MissingKey(key_name)
-        key.set_tagging(tagging)
+        self.tagger.delete_all_tags_for_resource(key.arn)
+        self.tagger.tag_resource(
+            key.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
+        )
         return key

-    def put_bucket_tagging(self, bucket_name, tagging):
-        tag_keys = [tag.key for tag in tagging.tag_set.tags]
-        if len(tag_keys) != len(set(tag_keys)):
-            raise DuplicateTagKeys()
+    def get_bucket_tags(self, bucket_name):
         bucket = self.get_bucket(bucket_name)
-        bucket.set_tags(tagging)
+        return self.tagger.list_tags_for_resource(bucket.arn)
+
+    def put_bucket_tags(self, bucket_name, tags):
+        bucket = self.get_bucket(bucket_name)
+        self.tagger.delete_all_tags_for_resource(bucket.arn)
+        self.tagger.tag_resource(
+            bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
+        )

     def delete_bucket_tagging(self, bucket_name):
         bucket = self.get_bucket(bucket_name)
-        bucket.delete_tags()
+        self.tagger.delete_all_tags_for_resource(bucket.arn)

     def put_bucket_cors(self, bucket_name, cors_rules):
         bucket = self.get_bucket(bucket_name)
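Two details worth noting in `set_key_tags` and `put_bucket_tags`: the backend now traffics in plain dicts and converts to the tagger's list-of-`{"Key", "Value"}` shape at the boundary, and the comprehension `[{"Key": key, ...} for key, value in tags.items()]` reuses the name `key`, which is harmless in Python 3 (comprehensions have their own scope) but easy to misread. The conversion round-trips like this:

```python
def to_tag_list(tags):
    # {"env": "prod"} -> [{"Key": "env", "Value": "prod"}]
    return [{"Key": k, "Value": v} for k, v in tags.items()]


def to_tag_dict(tag_list):
    # the inverse, handy for assertions and config output
    return {t["Key"]: t["Value"] for t in tag_list}


assert to_tag_dict(to_tag_list({"env": "prod"})) == {"env": "prod"}
```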
@@ -1574,6 +1593,7 @@ class S3Backend(BaseBackend):
         key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id)

         new_key = key.copy(dest_key_name, dest_bucket.is_versioned)
+        self.tagger.copy_tags(key.arn, new_key.arn)

         if storage is not None:
             new_key.set_storage_class(storage)
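Because tags now live in the tagger keyed by ARN rather than on the key object, a copied key would otherwise arrive untagged; `copy_tags` preserves them, matching S3's default COPY tagging directive. A sketch, assuming the `TaggingService` defaults ("Tags"/"Key"/"Value") used elsewhere in this diff:

```python
tagger = TaggingService()
tagger.tag_resource("arn:aws:s3:::b/src/0", [{"Key": "team", "Value": "infra"}])
tagger.copy_tags("arn:aws:s3:::b/src/0", "arn:aws:s3:::b/dst/0")
tagger.list_tags_for_resource("arn:aws:s3:::b/dst/0")
# -> {"Tags": [{"Key": "team", "Value": "infra"}]}
```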
@@ -5,7 +5,6 @@ import sys

 import six
 from botocore.awsrequest import AWSPreparedRequest
-from werkzeug.wrappers import Request

 from moto.core.utils import str_to_rfc_1123_datetime, py2_strip_unicode_keys
 from six.moves.urllib.parse import parse_qs, urlparse, unquote, parse_qsl
@@ -25,6 +24,7 @@ from moto.s3bucket_path.utils import (

 from .exceptions import (
     BucketAlreadyExists,
+    DuplicateTagKeys,
     S3ClientError,
     MissingBucket,
     MissingKey,
@@ -44,9 +44,6 @@ from .models import (
     FakeGrant,
     FakeAcl,
     FakeKey,
-    FakeTagging,
-    FakeTagSet,
-    FakeTag,
 )
 from .utils import (
     bucket_name_from_url,
@@ -135,7 +132,8 @@ ACTION_MAP = {


 def parse_key_name(pth):
-    return pth.lstrip("/")
+    # strip the first '/' left by urlparse
+    return pth[1:] if pth.startswith("/") else pth


 def is_delete_keys(request, path, bucket_name):
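The old `lstrip("/")` stripped every leading slash, so a key that itself begins with "/" lost it; slicing off a single character removes only the separator that urlparse leaves behind:

```python
path = "//nested/key"  # URL path for an object whose key is "/nested/key"
path.lstrip("/")       # -> 'nested/key'  (both slashes gone: wrong key)
path[1:] if path.startswith("/") else path  # -> '/nested/key' (key preserved)
```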
@@ -379,13 +377,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             template = self.response_template(S3_OBJECT_ACL_RESPONSE)
             return template.render(obj=bucket)
         elif "tagging" in querystring:
-            bucket = self.backend.get_bucket(bucket_name)
+            tags = self.backend.get_bucket_tags(bucket_name)["Tags"]
             # "Special Error" if no tags:
-            if len(bucket.tagging.tag_set.tags) == 0:
+            if len(tags) == 0:
                 template = self.response_template(S3_NO_BUCKET_TAGGING)
                 return 404, {}, template.render(bucket_name=bucket_name)
-            template = self.response_template(S3_BUCKET_TAGGING_RESPONSE)
-            return template.render(bucket=bucket)
+            template = self.response_template(S3_OBJECT_TAGGING_RESPONSE)
+            return template.render(tags=tags)
         elif "logging" in querystring:
             bucket = self.backend.get_bucket(bucket_name)
             if not bucket.logging:
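The 404 "Special Error" mirrors real S3, where `GetBucketTagging` on an existing but untagged bucket fails with `NoSuchTagSet`. A sketch of what a client sees (hypothetical bucket name; works against real S3 or against moto with `mock_s3` active):

```python
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3", region_name="us-east-1")
try:
    s3.get_bucket_tagging(Bucket="my-untagged-bucket")
except ClientError as err:
    assert err.response["Error"]["Code"] == "NoSuchTagSet"
```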
@@ -653,7 +651,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             return ""
         elif "tagging" in querystring:
             tagging = self._bucket_tagging_from_xml(body)
-            self.backend.put_bucket_tagging(bucket_name, tagging)
+            self.backend.put_bucket_tags(bucket_name, tagging)
             return ""
         elif "website" in querystring:
             self.backend.set_bucket_website_configuration(bucket_name, body)
@@ -797,14 +795,6 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         if hasattr(request, "form"):
             # Not HTTPretty
             form = request.form
-        elif request.headers.get("Content-Type").startswith("multipart/form-data"):
-            request = Request.from_values(
-                input_stream=six.BytesIO(request.body),
-                content_length=request.headers["Content-Length"],
-                content_type=request.headers["Content-Type"],
-                method="POST",
-            )
-            form = request.form
         else:
             # HTTPretty, build new form object
             body = body.decode()
@@ -1107,8 +1097,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             template = self.response_template(S3_OBJECT_ACL_RESPONSE)
             return 200, response_headers, template.render(obj=key)
         if "tagging" in query:
+            tags = self.backend.get_key_tags(key)["Tags"]
             template = self.response_template(S3_OBJECT_TAGGING_RESPONSE)
-            return 200, response_headers, template.render(obj=key)
+            return 200, response_headers, template.render(tags=tags)

         response_headers.update(key.metadata)
         response_headers.update(key.response_dict)
@@ -1180,8 +1171,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
                 version_id = query["versionId"][0]
             else:
                 version_id = None
+            key = self.backend.get_key(bucket_name, key_name, version_id=version_id)
             tagging = self._tagging_from_xml(body)
-            self.backend.set_key_tagging(bucket_name, key_name, tagging, version_id)
+            self.backend.set_key_tags(key, tagging, key_name)
             return 200, response_headers, ""

         if "x-amz-copy-source" in request.headers:
@@ -1222,7 +1214,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             tdirective = request.headers.get("x-amz-tagging-directive")
             if tdirective == "REPLACE":
                 tagging = self._tagging_from_headers(request.headers)
-                new_key.set_tagging(tagging)
+                self.backend.set_key_tags(new_key, tagging)
             template = self.response_template(S3_OBJECT_COPY_RESPONSE)
             response_headers.update(new_key.response_dict)
             return 200, response_headers, template.render(key=new_key)
@@ -1246,7 +1238,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             new_key.website_redirect_location = request.headers.get(
                 "x-amz-website-redirect-location"
            )
-            new_key.set_tagging(tagging)
+            self.backend.set_key_tags(new_key, tagging)

             response_headers.update(new_key.response_dict)
             return 200, response_headers, ""
@@ -1374,55 +1366,45 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         return None

     def _tagging_from_headers(self, headers):
+        tags = {}
         if headers.get("x-amz-tagging"):
             parsed_header = parse_qs(headers["x-amz-tagging"], keep_blank_values=True)
-            tags = []
             for tag in parsed_header.items():
-                tags.append(FakeTag(tag[0], tag[1][0]))
-
-            tag_set = FakeTagSet(tags)
-            tagging = FakeTagging(tag_set)
-            return tagging
-        else:
-            return FakeTagging()
+                tags[tag[0]] = tag[1][0]
+        return tags

     def _tagging_from_xml(self, xml):
         parsed_xml = xmltodict.parse(xml, force_list={"Tag": True})

-        tags = []
+        tags = {}
         for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
-            tags.append(FakeTag(tag["Key"], tag["Value"]))
+            tags[tag["Key"]] = tag["Value"]

-        tag_set = FakeTagSet(tags)
-        tagging = FakeTagging(tag_set)
-        return tagging
+        return tags

     def _bucket_tagging_from_xml(self, xml):
         parsed_xml = xmltodict.parse(xml)

-        tags = []
+        tags = {}
         # Optional if no tags are being sent:
         if parsed_xml["Tagging"].get("TagSet"):
             # If there is only 1 tag, then it's not a list:
             if not isinstance(parsed_xml["Tagging"]["TagSet"]["Tag"], list):
-                tags.append(
-                    FakeTag(
-                        parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"],
-                        parsed_xml["Tagging"]["TagSet"]["Tag"]["Value"],
-                    )
-                )
+                tags[parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"]] = parsed_xml[
+                    "Tagging"
+                ]["TagSet"]["Tag"]["Value"]
             else:
                 for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
-                    tags.append(FakeTag(tag["Key"], tag["Value"]))
+                    if tag["Key"] in tags:
+                        raise DuplicateTagKeys()
+                    tags[tag["Key"]] = tag["Value"]

         # Verify that "aws:" is not in the tags. If so, then this is a problem:
-        for tag in tags:
-            if tag.key.startswith("aws:"):
+        for key, _ in tags.items():
+            if key.startswith("aws:"):
                 raise NoSystemTags()

-        tag_set = FakeTagSet(tags)
-        tagging = FakeTagging(tag_set)
-        return tagging
+        return tags

     def _cors_from_xml(self, xml):
         parsed_xml = xmltodict.parse(xml)
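`_tagging_from_headers` relies on the `x-amz-tagging` header being URL-query encoded; `keep_blank_values=True` is what lets a tag with an empty value survive parsing:

```python
from urllib.parse import parse_qs  # six.moves.urllib.parse in the code above

header = "env=prod&owner="
parse_qs(header, keep_blank_values=True)  # -> {'env': ['prod'], 'owner': ['']}
parse_qs(header)                          # -> {'env': ['prod']}  (blank tag dropped)
```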
@@ -1742,10 +1724,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
                 {% if rule.filter.prefix != None %}
                 <Prefix>{{ rule.filter.prefix }}</Prefix>
                 {% endif %}
-                {% if rule.filter.tag %}
+                {% if rule.filter.tag_key %}
                 <Tag>
-                    <Key>{{ rule.filter.tag.key }}</Key>
-                    <Value>{{ rule.filter.tag.value }}</Value>
+                    <Key>{{ rule.filter.tag_key }}</Key>
+                    <Value>{{ rule.filter.tag_value }}</Value>
                 </Tag>
                 {% endif %}
                 {% if rule.filter.and_filter %}
@@ -1753,10 +1735,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
                     {% if rule.filter.and_filter.prefix != None %}
                     <Prefix>{{ rule.filter.and_filter.prefix }}</Prefix>
                     {% endif %}
-                    {% for tag in rule.filter.and_filter.tags %}
+                    {% for key, value in rule.filter.and_filter.tags.items() %}
                     <Tag>
-                        <Key>{{ tag.key }}</Key>
-                        <Value>{{ tag.value }}</Value>
+                        <Key>{{ key }}</Key>
+                        <Value>{{ value }}</Value>
                     </Tag>
                     {% endfor %}
                 </And>
@@ -1917,22 +1899,10 @@ S3_OBJECT_TAGGING_RESPONSE = """\
 <?xml version="1.0" encoding="UTF-8"?>
 <Tagging xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
   <TagSet>
-    {% for tag in obj.tagging.tag_set.tags %}
+    {% for tag in tags %}
     <Tag>
-      <Key>{{ tag.key }}</Key>
-      <Value>{{ tag.value }}</Value>
-    </Tag>
-    {% endfor %}
-  </TagSet>
-</Tagging>"""
-
-S3_BUCKET_TAGGING_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
-<Tagging>
-  <TagSet>
-    {% for tag in bucket.tagging.tag_set.tags %}
-    <Tag>
-      <Key>{{ tag.key }}</Key>
-      <Value>{{ tag.value }}</Value>
+      <Key>{{ tag.Key }}</Key>
+      <Value>{{ tag.Value }}</Value>
     </Tag>
     {% endfor %}
   </TagSet>
@@ -15,5 +15,5 @@ url_paths = {
     # path-based bucket + key
     "{0}/(?P<bucket_name_path>[^/]+)/(?P<key_name>.+)": S3ResponseInstance.key_or_control_response,
     # subdomain bucket + key with empty first part of path
-    "{0}//(?P<key_name>.*)$": S3ResponseInstance.key_or_control_response,
+    "{0}/(?P<key_name>/.*)$": S3ResponseInstance.key_or_control_response,
 }
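The URL pattern change is the routing half of the `parse_key_name` fix above: the old pattern consumed both slashes, so the captured key name had already lost its leading "/" before `parse_key_name` ever ran. With the `{0}` host prefix replaced by an empty string for brevity:

```python
import re

old = re.compile(r"//(?P<key_name>.*)$")
new = re.compile(r"/(?P<key_name>/.*)$")

path = "//logs/app.log"  # subdomain-style request for the key "/logs/app.log"
old.search(path).group("key_name")  # -> 'logs/app.log'  (leading slash already gone)
new.search(path).group("key_name")  # -> '/logs/app.log' (slash kept for parse_key_name)
```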
@@ -146,6 +146,12 @@ class _VersionedKeyStore(dict):
         for key in self:
             yield key, self.getlist(key)

+    def item_size(self):
+        size = 0
+        for val in self.values():
+            size += sys.getsizeof(val)
+        return size
+
     items = iteritems = _iteritems
     lists = iterlists = _iterlists
     values = itervalues = _itervalues
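`item_size` feeds the `BucketSizeBytes` metric above. Note that `sys.getsizeof` measures the Python object rather than the payload, and the values in a `_VersionedKeyStore` are `FakeKey` objects rather than raw bytes, so the metric is a rough proxy: fine for a mock, but not byte-accurate.

```python
import sys

payload = b"x" * 1024
len(payload)            # -> 1024 (actual content length)
sys.getsizeof(payload)  # -> a little more: includes the bytes-object header
```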
@@ -5,15 +5,23 @@ class TaggingService:
         self.valueName = valueName
         self.tags = {}

+    def get_tag_dict_for_resource(self, arn):
+        result = {}
+        if self.has_tags(arn):
+            for k, v in self.tags[arn].items():
+                result[k] = v
+        return result
+
     def list_tags_for_resource(self, arn):
         result = []
-        if arn in self.tags:
+        if self.has_tags(arn):
             for k, v in self.tags[arn].items():
                 result.append({self.keyName: k, self.valueName: v})
         return {self.tagName: result}

     def delete_all_tags_for_resource(self, arn):
-        del self.tags[arn]
+        if self.has_tags(arn):
+            del self.tags[arn]

     def has_tags(self, arn):
         return arn in self.tags
@@ -27,6 +35,12 @@ class TaggingService:
         else:
             self.tags[arn][t[self.keyName]] = None

+    def copy_tags(self, from_arn, to_arn):
+        if self.has_tags(from_arn):
+            self.tag_resource(
+                to_arn, self.list_tags_for_resource(from_arn)[self.tagName]
+            )
+
     def untag_resource_using_names(self, arn, tag_names):
         for name in tag_names:
             if name in self.tags.get(arn, {}):
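Taken together, these additions cover the whole S3 use-case. A usage sketch, assuming the default field names ("Tags"/"Key"/"Value") implied by the S3 response code earlier in this diff:

```python
svc = TaggingService()
svc.tag_resource("arn:aws:s3:::demo", [{"Key": "env", "Value": "test"}])

svc.list_tags_for_resource("arn:aws:s3:::demo")
# -> {"Tags": [{"Key": "env", "Value": "test"}]}
svc.get_tag_dict_for_resource("arn:aws:s3:::demo")
# -> {"env": "test"}

svc.copy_tags("arn:aws:s3:::demo", "arn:aws:s3:::demo-copy")
svc.delete_all_tags_for_resource("arn:aws:s3:::demo")  # now safe to call twice
svc.has_tags("arn:aws:s3:::demo")       # -> False
svc.has_tags("arn:aws:s3:::demo-copy")  # -> True
```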
@@ -69,6 +69,22 @@ def test_create_rest_api_with_tags():
     response["tags"].should.equal({"MY_TAG1": "MY_VALUE1"})


+@mock_apigateway
+def test_create_rest_api_with_policy():
+    client = boto3.client("apigateway", region_name="us-west-2")
+
+    policy = '{"Version": "2012-10-17","Statement": []}'
+    response = client.create_rest_api(
+        name="my_api", description="this is my api", policy=policy
+    )
+    api_id = response["id"]
+
+    response = client.get_rest_api(restApiId=api_id)
+
+    assert "policy" in response
+    response["policy"].should.equal(policy)
+
+
 @mock_apigateway
 def test_create_rest_api_invalid_apikeysource():
     client = boto3.client("apigateway", region_name="us-west-2")
@@ -1677,6 +1677,42 @@ def test_create_function_with_unknown_arn():
         )


+@mock_lambda
+def test_remove_function_permission():
+    conn = boto3.client("lambda", _lambda_region)
+    zip_content = get_test_zip_file1()
+    conn.create_function(
+        FunctionName="testFunction",
+        Runtime="python2.7",
+        Role=(get_role_name()),
+        Handler="lambda_function.handler",
+        Code={"ZipFile": zip_content},
+        Description="test lambda function",
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    conn.add_permission(
+        FunctionName="testFunction",
+        StatementId="1",
+        Action="lambda:InvokeFunction",
+        Principal="432143214321",
+        SourceArn="arn:aws:lambda:us-west-2:account-id:function:helloworld",
+        SourceAccount="123412341234",
+        EventSourceToken="blah",
+        Qualifier="2",
+    )
+
+    remove = conn.remove_permission(
+        FunctionName="testFunction", StatementId="1", Qualifier="2",
+    )
+    remove["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)
+    policy = conn.get_policy(FunctionName="testFunction", Qualifier="2")["Policy"]
+    policy = json.loads(policy)
+    policy["Statement"].should.equal([])
+
+
 def create_invalid_lambda(role):
     conn = boto3.client("lambda", _lambda_region)
     zip_content = get_test_zip_file1()
@@ -835,8 +835,10 @@ def test_describe_change_set():
     )

     stack = cf_conn.describe_change_set(ChangeSetName="NewChangeSet")
+
     stack["ChangeSetName"].should.equal("NewChangeSet")
     stack["StackName"].should.equal("NewStack")
+    stack["Status"].should.equal("REVIEW_IN_PROGRESS")

     cf_conn.create_change_set(
         StackName="NewStack",
@@ -851,15 +853,30 @@


 @mock_cloudformation
+@mock_ec2
 def test_execute_change_set_w_arn():
     cf_conn = boto3.client("cloudformation", region_name="us-east-1")
+    ec2 = boto3.client("ec2", region_name="us-east-1")
+    # Verify no instances exist at the moment
+    ec2.describe_instances()["Reservations"].should.have.length_of(0)
+    # Create a Change set, and verify no resources have been created yet
     change_set = cf_conn.create_change_set(
         StackName="NewStack",
         TemplateBody=dummy_template_json,
         ChangeSetName="NewChangeSet",
         ChangeSetType="CREATE",
     )
+    ec2.describe_instances()["Reservations"].should.have.length_of(0)
+    cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal(
+        "REVIEW_IN_PROGRESS"
+    )
+    # Execute change set
     cf_conn.execute_change_set(ChangeSetName=change_set["Id"])
+    # Verify that the status has changed, and the appropriate resources have been created
+    cf_conn.describe_change_set(ChangeSetName="NewChangeSet")["Status"].should.equal(
+        "CREATE_COMPLETE"
+    )
+    ec2.describe_instances()["Reservations"].should.have.length_of(1)


 @mock_cloudformation
@@ -1,9 +1,10 @@
 import boto
 from boto.ec2.cloudwatch.alarm import MetricAlarm
+from boto.s3.key import Key
 from datetime import datetime
 import sure  # noqa

-from moto import mock_cloudwatch_deprecated
+from moto import mock_cloudwatch_deprecated, mock_s3_deprecated


 def alarm_fixture(name="tester", action=None):
@@ -83,10 +84,11 @@ def test_put_metric_data():
     )

     metrics = conn.list_metrics()
-    metrics.should.have.length_of(1)
+    metric_names = [m for m in metrics if m.name == "metric"]
+    metric_names.should.have(1)
     metric = metrics[0]
     metric.namespace.should.equal("tester")
-    metric.name.should.equal("metric")
+    metric.name.should.equal("Metric:metric")
     dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})


@@ -153,3 +155,35 @@ def test_get_metric_statistics():
     datapoint = datapoints[0]
     datapoint.should.have.key("Minimum").which.should.equal(1.5)
     datapoint.should.have.key("Timestamp").which.should.equal(metric_timestamp)
+
+
+@mock_s3_deprecated
+@mock_cloudwatch_deprecated
+def test_cloudwatch_return_s3_metrics():
+
+    region = "us-east-1"
+
+    cw = boto.ec2.cloudwatch.connect_to_region(region)
+    s3 = boto.s3.connect_to_region(region)
+
+    bucket_name_1 = "test-bucket-1"
+    bucket_name_2 = "test-bucket-2"
+
+    bucket1 = s3.create_bucket(bucket_name=bucket_name_1)
+    key = Key(bucket1)
+    key.key = "the-key"
+    key.set_contents_from_string("foobar" * 4)
+    s3.create_bucket(bucket_name=bucket_name_2)
+
+    metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1})
+    # Verify that the OOTB S3 metrics are available for the created buckets
+    len(metrics_s3_bucket_1).should.be(2)
+    metric_names = [m.name for m in metrics_s3_bucket_1]
+    sorted(metric_names).should.equal(
+        ["Metric:BucketSizeBytes", "Metric:NumberOfObjects"]
+    )
+
+    # Explicit clean up - the metrics for these buckets are messing with subsequent tests
+    key.delete()
+    s3.delete_bucket(bucket_name_1)
+    s3.delete_bucket(bucket_name_2)
@@ -3,6 +3,7 @@
 import boto3
 from botocore.exceptions import ClientError
 from datetime import datetime, timedelta
+from freezegun import freeze_time
 from nose.tools import assert_raises
 from uuid import uuid4
 import pytz
@@ -154,7 +155,7 @@ def test_put_metric_data_no_dimensions():
     metrics.should.have.length_of(1)
     metric = metrics[0]
     metric["Namespace"].should.equal("tester")
-    metric["MetricName"].should.equal("metric")
+    metric["MetricName"].should.equal("Metric:metric")


 @mock_cloudwatch
@@ -182,7 +183,7 @@ def test_put_metric_data_with_statistics():
     metrics.should.have.length_of(1)
     metric = metrics[0]
     metric["Namespace"].should.equal("tester")
-    metric["MetricName"].should.equal("statmetric")
+    metric["MetricName"].should.equal("Metric:statmetric")
     # TODO: test statistics - https://github.com/spulec/moto/issues/1615


@@ -211,6 +212,35 @@ def test_get_metric_statistics():
     datapoint["Sum"].should.equal(1.5)


+@mock_cloudwatch
+@freeze_time("2020-02-10 18:44:05")
+def test_custom_timestamp():
+    utc_now = datetime.now(tz=pytz.utc)
+    time = "2020-02-10T18:44:09Z"
+    cw = boto3.client("cloudwatch", "eu-west-1")
+
+    cw.put_metric_data(
+        Namespace="tester",
+        MetricData=[dict(MetricName="metric1", Value=1.5, Timestamp=time)],
+    )
+
+    cw.put_metric_data(
+        Namespace="tester",
+        MetricData=[
+            dict(MetricName="metric2", Value=1.5, Timestamp=datetime(2020, 2, 10))
+        ],
+    )
+
+    stats = cw.get_metric_statistics(
+        Namespace="tester",
+        MetricName="metric",
+        StartTime=utc_now - timedelta(seconds=60),
+        EndTime=utc_now + timedelta(seconds=60),
+        Period=60,
+        Statistics=["SampleCount", "Sum"],
+    )
+
+
 @mock_cloudwatch
 def test_list_metrics():
     cloudwatch = boto3.client("cloudwatch", "eu-west-1")
@@ -233,8 +263,16 @@ def test_list_metrics():
     # Verify format
     res.should.equal(
         [
-            {u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
-            {u"Namespace": "list_test_1/", u"Dimensions": [], u"MetricName": "metric1"},
+            {
+                u"Namespace": "list_test_1/",
+                u"Dimensions": [],
+                u"MetricName": "Metric:metric1",
+            },
+            {
+                u"Namespace": "list_test_1/",
+                u"Dimensions": [],
+                u"MetricName": "Metric:metric1",
+            },
         ]
     )
     # Verify unknown namespace still has no results
@@ -292,3 +330,232 @@ def create_metrics(cloudwatch, namespace, metrics=5, data_points=5):
             Namespace=namespace,
             MetricData=[{"MetricName": metric_name, "Value": j, "Unit": "Seconds"}],
         )
+
+
+@mock_cloudwatch
+def test_get_metric_data_within_timeframe():
+    utc_now = datetime.now(tz=pytz.utc)
+    cloudwatch = boto3.client("cloudwatch", "eu-west-1")
+    namespace1 = "my_namespace/"
+    # put metric data
+    values = [0, 2, 4, 3.5, 7, 100]
+    cloudwatch.put_metric_data(
+        Namespace=namespace1,
+        MetricData=[
+            {"MetricName": "metric1", "Value": val, "Unit": "Seconds"} for val in values
+        ],
+    )
+    # get_metric_data
+    stats = ["Average", "Sum", "Minimum", "Maximum"]
+    response = cloudwatch.get_metric_data(
+        MetricDataQueries=[
+            {
+                "Id": "result_" + stat,
+                "MetricStat": {
+                    "Metric": {"Namespace": namespace1, "MetricName": "metric1"},
+                    "Period": 60,
+                    "Stat": stat,
+                },
+            }
+            for stat in stats
+        ],
+        StartTime=utc_now - timedelta(seconds=60),
+        EndTime=utc_now + timedelta(seconds=60),
+    )
+    #
+    # Assert Average/Min/Max/Sum is returned as expected
+    avg = [
+        res for res in response["MetricDataResults"] if res["Id"] == "result_Average"
+    ][0]
+    avg["Label"].should.equal("metric1 Average")
+    avg["StatusCode"].should.equal("Complete")
+    [int(val) for val in avg["Values"]].should.equal([19])
+
+    sum_ = [res for res in response["MetricDataResults"] if res["Id"] == "result_Sum"][
+        0
+    ]
+    sum_["Label"].should.equal("metric1 Sum")
+    sum_["StatusCode"].should.equal("Complete")
+    [val for val in sum_["Values"]].should.equal([sum(values)])
+
+    min_ = [
+        res for res in response["MetricDataResults"] if res["Id"] == "result_Minimum"
+    ][0]
+    min_["Label"].should.equal("metric1 Minimum")
+    min_["StatusCode"].should.equal("Complete")
+    [int(val) for val in min_["Values"]].should.equal([0])
+
+    max_ = [
+        res for res in response["MetricDataResults"] if res["Id"] == "result_Maximum"
+    ][0]
+    max_["Label"].should.equal("metric1 Maximum")
+    max_["StatusCode"].should.equal("Complete")
+    [int(val) for val in max_["Values"]].should.equal([100])
+
+
+@mock_cloudwatch
+def test_get_metric_data_partially_within_timeframe():
+    utc_now = datetime.now(tz=pytz.utc)
+    yesterday = utc_now - timedelta(days=1)
+    last_week = utc_now - timedelta(days=7)
+    cloudwatch = boto3.client("cloudwatch", "eu-west-1")
+    namespace1 = "my_namespace/"
+    # put metric data
+    values = [0, 2, 4, 3.5, 7, 100]
+    cloudwatch.put_metric_data(
+        Namespace=namespace1,
+        MetricData=[
+            {
+                "MetricName": "metric1",
+                "Value": 10,
+                "Unit": "Seconds",
+                "Timestamp": utc_now,
+            }
+        ],
+    )
+    cloudwatch.put_metric_data(
+        Namespace=namespace1,
+        MetricData=[
+            {
+                "MetricName": "metric1",
+                "Value": 20,
+                "Unit": "Seconds",
+                "Timestamp": yesterday,
+            }
+        ],
+    )
+    cloudwatch.put_metric_data(
+        Namespace=namespace1,
+        MetricData=[
+            {
+                "MetricName": "metric1",
+                "Value": 50,
+                "Unit": "Seconds",
+                "Timestamp": last_week,
+            }
+        ],
+    )
+    # get_metric_data
+    response = cloudwatch.get_metric_data(
+        MetricDataQueries=[
+            {
+                "Id": "result",
+                "MetricStat": {
+                    "Metric": {"Namespace": namespace1, "MetricName": "metric1"},
+                    "Period": 60,
+                    "Stat": "Sum",
+                },
+            }
+        ],
+        StartTime=yesterday - timedelta(seconds=60),
+        EndTime=utc_now + timedelta(seconds=60),
+    )
+    #
+    # Assert Last week's data is not returned
+    len(response["MetricDataResults"]).should.equal(1)
+    sum_ = response["MetricDataResults"][0]
+    sum_["Label"].should.equal("metric1 Sum")
+    sum_["StatusCode"].should.equal("Complete")
+    sum_["Values"].should.equal([30.0])
+
+
+@mock_cloudwatch
+def test_get_metric_data_outside_timeframe():
+    utc_now = datetime.now(tz=pytz.utc)
+    last_week = utc_now - timedelta(days=7)
+    cloudwatch = boto3.client("cloudwatch", "eu-west-1")
+    namespace1 = "my_namespace/"
+    # put metric data
+    cloudwatch.put_metric_data(
+        Namespace=namespace1,
+        MetricData=[
+            {
+                "MetricName": "metric1",
+                "Value": 50,
+                "Unit": "Seconds",
+                "Timestamp": last_week,
+            }
+        ],
+    )
+    # get_metric_data
+    response = cloudwatch.get_metric_data(
+        MetricDataQueries=[
+            {
+                "Id": "result",
+                "MetricStat": {
+                    "Metric": {"Namespace": namespace1, "MetricName": "metric1"},
+                    "Period": 60,
+                    "Stat": "Sum",
+                },
+            }
+        ],
+        StartTime=utc_now - timedelta(seconds=60),
+        EndTime=utc_now + timedelta(seconds=60),
+    )
+    #
+    # Assert Last week's data is not returned
+    len(response["MetricDataResults"]).should.equal(1)
+    response["MetricDataResults"][0]["Id"].should.equal("result")
+    response["MetricDataResults"][0]["StatusCode"].should.equal("Complete")
+    response["MetricDataResults"][0]["Values"].should.equal([])
+
+
+@mock_cloudwatch
+def test_get_metric_data_for_multiple_metrics():
+    utc_now = datetime.now(tz=pytz.utc)
+    cloudwatch = boto3.client("cloudwatch", "eu-west-1")
+    namespace = "my_namespace/"
+    # put metric data
+    cloudwatch.put_metric_data(
+        Namespace=namespace,
+        MetricData=[
+            {
+                "MetricName": "metric1",
+                "Value": 50,
+                "Unit": "Seconds",
+                "Timestamp": utc_now,
+            }
+        ],
+    )
+    cloudwatch.put_metric_data(
+        Namespace=namespace,
+        MetricData=[
+            {
+                "MetricName": "metric2",
+                "Value": 25,
+                "Unit": "Seconds",
+                "Timestamp": utc_now,
+            }
+        ],
+    )
+    # get_metric_data
+    response = cloudwatch.get_metric_data(
+        MetricDataQueries=[
+            {
+                "Id": "result1",
+                "MetricStat": {
+                    "Metric": {"Namespace": namespace, "MetricName": "metric1"},
+                    "Period": 60,
+                    "Stat": "Sum",
+                },
+            },
+            {
+                "Id": "result2",
+                "MetricStat": {
+                    "Metric": {"Namespace": namespace, "MetricName": "metric2"},
+                    "Period": 60,
+                    "Stat": "Sum",
+                },
+            },
+        ],
+        StartTime=utc_now - timedelta(seconds=60),
+        EndTime=utc_now + timedelta(seconds=60),
+    )
+    #
+    len(response["MetricDataResults"]).should.equal(2)
+
+    res1 = [res for res in response["MetricDataResults"] if res["Id"] == "result1"][0]
+    res1["Values"].should.equal([50.0])
+
+    res2 = [res for res in response["MetricDataResults"] if res["Id"] == "result2"][0]
+    res2["Values"].should.equal([25.0])
@@ -7,6 +7,7 @@ from nose.tools import assert_raises
 from moto import mock_cognitoidentity
 from moto.cognitoidentity.utils import get_random_identity_id
 from moto.core import ACCOUNT_ID
+from uuid import UUID


 @mock_cognitoidentity
@@ -83,8 +84,10 @@ def test_describe_identity_pool_with_invalid_id_raises_error():

 # testing a helper function
 def test_get_random_identity_id():
-    assert len(get_random_identity_id("us-west-2")) > 0
-    assert len(get_random_identity_id("us-west-2").split(":")[1]) == 19
+    identity_id = get_random_identity_id("us-west-2")
+    region, id = identity_id.split(":")
+    region.should.equal("us-west-2")
+    UUID(id, version=4)  # Will throw an error if it's not a valid UUID


 @mock_cognitoidentity
@@ -96,7 +99,6 @@ def test_get_id():
         IdentityPoolId="us-west-2:12345",
         Logins={"someurl": "12345"},
     )
-    print(result)
     assert (
         result.get("IdentityId", "").startswith("us-west-2")
         or result.get("ResponseMetadata").get("HTTPStatusCode") == 200
@@ -48,6 +48,5 @@ def test_get_id():
         },
     )

-    print(res.data)
     json_data = json.loads(res.data.decode("utf-8"))
     assert ":" in json_data["IdentityId"]
@@ -11,6 +11,8 @@ from moto import mock_s3
 from moto.config import mock_config
 from moto.core import ACCOUNT_ID

+import sure  # noqa
+

 @mock_config
 def test_put_configuration_recorder():
@@ -1454,6 +1454,13 @@ def test_filter_expression():
     filter_expr.expr(row1).should.be(True)
     filter_expr.expr(row2).should.be(False)

+    # lowercase AND test
+    filter_expr = moto.dynamodb2.comparisons.get_filter_expression(
+        "Id > :v0 and Subs < :v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "7"}}
+    )
+    filter_expr.expr(row1).should.be(True)
+    filter_expr.expr(row2).should.be(False)
+
     # OR test
     filter_expr = moto.dynamodb2.comparisons.get_filter_expression(
         "Id = :v0 OR Id=:v1", {}, {":v0": {"N": "5"}, ":v1": {"N": "8"}}
@@ -2785,7 +2792,7 @@ def test_query_gsi_with_range_key():
     res = dynamodb.query(
         TableName="test",
         IndexName="test_gsi",
-        KeyConditionExpression="gsi_hash_key = :gsi_hash_key AND gsi_range_key = :gsi_range_key",
+        KeyConditionExpression="gsi_hash_key = :gsi_hash_key and gsi_range_key = :gsi_range_key",
         ExpressionAttributeValues={
             ":gsi_hash_key": {"S": "key1"},
             ":gsi_range_key": {"S": "range1"},
@@ -4212,6 +4219,44 @@ def test_gsi_verify_negative_number_order():
     )


+@mock_dynamodb2
+def test_dynamodb_max_1mb_limit():
+    ddb = boto3.resource("dynamodb", region_name="eu-west-1")
+
+    table_name = "populated-mock-table"
+    table = ddb.create_table(
+        TableName=table_name,
+        KeySchema=[
+            {"AttributeName": "partition_key", "KeyType": "HASH"},
+            {"AttributeName": "sort_key", "KeyType": "RANGE"},
+        ],
+        AttributeDefinitions=[
+            {"AttributeName": "partition_key", "AttributeType": "S"},
+            {"AttributeName": "sort_key", "AttributeType": "S"},
+        ],
+        BillingMode="PAY_PER_REQUEST",
+    )
+
+    # Populate the table
+    items = [
+        {
+            "partition_key": "partition_key_val",  # size=30
+            "sort_key": "sort_key_value____" + str(i),  # size=30
+        }
+        for i in range(10000, 29999)
+    ]
+    with table.batch_writer() as batch:
+        for item in items:
+            batch.put_item(Item=item)
+
+    response = table.query(
+        KeyConditionExpression=Key("partition_key").eq("partition_key_val")
+    )
+    # We shouldn't get everything back - the total result set is well over 1MB
+    len(items).should.be.greater_than(response["Count"])
+    response["LastEvaluatedKey"].shouldnt.be(None)
+
+
 def assert_raise_syntax_error(client_error, token, near):
     """
     Assert whether a client_error is as expected Syntax error. Syntax error looks like: `syntax_error_template`
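The new 1 MB-limit test only checks that truncation happens; a caller that wants the full result set follows `LastEvaluatedKey`. A minimal pagination sketch in the same boto3 style as the test above:

```python
from boto3.dynamodb.conditions import Key


def query_all(table, partition_value):
    # Keep querying until DynamoDB (or the moto mock) stops returning LastEvaluatedKey.
    kwargs = {"KeyConditionExpression": Key("partition_key").eq(partition_value)}
    while True:
        page = table.query(**kwargs)
        for item in page["Items"]:
            yield item
        if "LastEvaluatedKey" not in page:
            break
        kwargs["ExclusiveStartKey"] = page["LastEvaluatedKey"]
```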
@ -4277,3 +4322,12 @@ def test_update_expression_with_multiple_set_clauses_must_be_comma_separated():
|
|||||||
assert False, "Validation exception not thrown"
|
assert False, "Validation exception not thrown"
|
||||||
except dynamodb.exceptions.ClientError as e:
|
except dynamodb.exceptions.ClientError as e:
|
||||||
assert_raise_syntax_error(e, "Mystr2", "myNum Mystr2 myNum2")
|
assert_raise_syntax_error(e, "Mystr2", "myNum Mystr2 myNum2")
|
||||||
|
|
||||||
|
|
||||||
|
@mock_dynamodb2
|
||||||
|
def test_list_tables_exclusive_start_table_name_empty():
|
||||||
|
client = boto3.client("dynamodb", region_name="us-east-1")
|
||||||
|
|
||||||
|
resp = client.list_tables(Limit=1, ExclusiveStartTableName="whatever")
|
||||||
|
|
||||||
|
len(resp["TableNames"]).should.equal(0)
|
||||||
|
130
tests/test_eb/test_eb.py
Normal file
130
tests/test_eb/test_eb.py
Normal file
@ -0,0 +1,130 @@
+import boto3
+import sure  # noqa
+from botocore.exceptions import ClientError
+
+from moto import mock_elasticbeanstalk
+
+
+@mock_elasticbeanstalk
+def test_create_application():
+    # Create Elastic Beanstalk Application
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    app = conn.create_application(ApplicationName="myapp",)
+    app["Application"]["ApplicationName"].should.equal("myapp")
+
+
+@mock_elasticbeanstalk
+def test_create_application_dup():
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    conn.create_application(ApplicationName="myapp",)
+    conn.create_application.when.called_with(ApplicationName="myapp",).should.throw(
+        ClientError
+    )
+
+
+@mock_elasticbeanstalk
+def test_describe_applications():
+    # Create Elastic Beanstalk Application
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    conn.create_application(ApplicationName="myapp",)
+
+    apps = conn.describe_applications()
+    len(apps["Applications"]).should.equal(1)
+    apps["Applications"][0]["ApplicationName"].should.equal("myapp")
+
+
+@mock_elasticbeanstalk
+def test_create_environment():
+    # Create Elastic Beanstalk Environment
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    app = conn.create_application(ApplicationName="myapp",)
+    env = conn.create_environment(ApplicationName="myapp", EnvironmentName="myenv",)
+    env["EnvironmentName"].should.equal("myenv")
+
+
+@mock_elasticbeanstalk
+def test_describe_environments():
+    # List Elastic Beanstalk Envs
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    conn.create_application(ApplicationName="myapp",)
+    conn.create_environment(
+        ApplicationName="myapp", EnvironmentName="myenv",
+    )
+
+    envs = conn.describe_environments()
+    envs = envs["Environments"]
+    len(envs).should.equal(1)
+    envs[0]["ApplicationName"].should.equal("myapp")
+    envs[0]["EnvironmentName"].should.equal("myenv")
+
+
+def tags_dict_to_list(tag_dict):
+    tag_list = []
+    for key, value in tag_dict.items():
+        tag_list.append({"Key": key, "Value": value})
+    return tag_list
+
+
+def tags_list_to_dict(tag_list):
+    tag_dict = {}
+    for tag in tag_list:
+        tag_dict[tag["Key"]] = tag["Value"]
+    return tag_dict
+
+
+@mock_elasticbeanstalk
+def test_create_environment_tags():
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    conn.create_application(ApplicationName="myapp",)
+    env_tags = {"initial key": "initial value"}
+    env = conn.create_environment(
+        ApplicationName="myapp",
+        EnvironmentName="myenv",
+        Tags=tags_dict_to_list(env_tags),
+    )
+
+    tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
+    tags["ResourceArn"].should.equal(env["EnvironmentArn"])
+    tags_list_to_dict(tags["ResourceTags"]).should.equal(env_tags)
+
+
+@mock_elasticbeanstalk
+def test_update_tags():
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    conn.create_application(ApplicationName="myapp",)
+    env_tags = {
+        "initial key": "initial value",
+        "to remove": "delete me",
+        "to update": "original",
+    }
+    env = conn.create_environment(
+        ApplicationName="myapp",
+        EnvironmentName="myenv",
+        Tags=tags_dict_to_list(env_tags),
+    )
+
+    extra_env_tags = {
+        "to update": "new",
+        "extra key": "extra value",
+    }
+    conn.update_tags_for_resource(
+        ResourceArn=env["EnvironmentArn"],
+        TagsToAdd=tags_dict_to_list(extra_env_tags),
+        TagsToRemove=["to remove"],
+    )
+
+    total_env_tags = env_tags.copy()
+    total_env_tags.update(extra_env_tags)
+    del total_env_tags["to remove"]
+
+    tags = conn.list_tags_for_resource(ResourceArn=env["EnvironmentArn"],)
+    tags["ResourceArn"].should.equal(env["EnvironmentArn"])
+    tags_list_to_dict(tags["ResourceTags"]).should.equal(total_env_tags)
+
+
+@mock_elasticbeanstalk
+def test_list_available_solution_stacks():
+    conn = boto3.client("elasticbeanstalk", region_name="us-east-1")
+    stacks = conn.list_available_solution_stacks()
+    len(stacks["SolutionStacks"]).should.be.greater_than(0)
+    len(stacks["SolutionStacks"]).should.be.equal(len(stacks["SolutionStackDetails"]))
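The tags_dict_to_list / tags_list_to_dict helpers above convert between the list-of-{"Key", "Value"} shape the Elastic Beanstalk API uses and plain dicts that are easier to compare. A minimal sketch of the same round-trip written as comprehensions (names reused from the file above; not part of the diff):

def tags_dict_to_list(tag_dict):
    # dict -> AWS-style [{"Key": ..., "Value": ...}, ...]
    return [{"Key": key, "Value": value} for key, value in tag_dict.items()]

def tags_list_to_dict(tag_list):
    # AWS-style list -> plain dict
    return {tag["Key"]: tag["Value"] for tag in tag_list}

tags = {"team": "platform", "env": "test"}
assert tags_list_to_dict(tags_dict_to_list(tags)) == tags  # round-trip is lossless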
tests/test_ec2/test_instances.py
@@ -9,6 +9,7 @@ from nose.tools import assert_raises
 import base64
 import datetime
 import ipaddress
+import json

 import six
 import boto
@@ -18,7 +19,7 @@ from boto.exception import EC2ResponseError, EC2ResponseError
 from freezegun import freeze_time
 import sure  # noqa

-from moto import mock_ec2_deprecated, mock_ec2
+from moto import mock_ec2_deprecated, mock_ec2, mock_cloudformation
 from tests.helpers import requires_boto_gte


@@ -1334,6 +1335,12 @@ def test_create_instance_ebs_optimized():
     instance.load()
     instance.ebs_optimized.should.be(False)
+
+    instance = ec2_resource.create_instances(
+        ImageId="ami-12345678", MaxCount=1, MinCount=1,
+    )[0]
+    instance.load()
+    instance.ebs_optimized.should.be(False)


 @mock_ec2
 def test_run_multiple_instances_in_same_command():
@@ -1414,3 +1421,40 @@ def test_describe_instance_attribute():
             invalid_instance_attribute=invalid_instance_attribute
         )
         ex.exception.response["Error"]["Message"].should.equal(message)
+
+
+@mock_ec2
+@mock_cloudformation
+def test_volume_size_through_cloudformation():
+    ec2 = boto3.client("ec2", region_name="us-east-1")
+    cf = boto3.client("cloudformation", region_name="us-east-1")
+
+    volume_template = {
+        "AWSTemplateFormatVersion": "2010-09-09",
+        "Resources": {
+            "testInstance": {
+                "Type": "AWS::EC2::Instance",
+                "Properties": {
+                    "ImageId": "ami-d3adb33f",
+                    "KeyName": "dummy",
+                    "InstanceType": "t2.micro",
+                    "BlockDeviceMappings": [
+                        {"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": "50"}}
+                    ],
+                    "Tags": [
+                        {"Key": "foo", "Value": "bar"},
+                        {"Key": "blah", "Value": "baz"},
+                    ],
+                },
+            }
+        },
+    }
+    template_json = json.dumps(volume_template)
+    cf.create_stack(StackName="test_stack", TemplateBody=template_json)
+    instances = ec2.describe_instances()
+    volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
+        "Ebs"
+    ]
+
+    volumes = ec2.describe_volumes(VolumeIds=[volume["VolumeId"]])
+    volumes["Volumes"][0]["Size"].should.equal(50)
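Note that the template declares "VolumeSize": "50" as a string while the assertion expects the integer 50, so the mocked CloudFormation path has to coerce string properties. A small sketch (assuming the same mocked boto3 clients; the helper is illustrative, not moto API) of the describe_instances → describe_volumes lookup the test performs:

def stack_volume_sizes(ec2_client):
    """Return the size of every EBS volume attached to described instances."""
    sizes = []
    for reservation in ec2_client.describe_instances()["Reservations"]:
        for instance in reservation["Instances"]:
            for mapping in instance.get("BlockDeviceMappings", []):
                volume_id = mapping["Ebs"]["VolumeId"]
                resp = ec2_client.describe_volumes(VolumeIds=[volume_id])
                sizes.append(resp["Volumes"][0]["Size"])
    return sizes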
tests/test_glue/test_datacatalog.py
@@ -52,6 +52,29 @@ def test_get_database_not_exits():
     )


+@mock_glue
+def test_get_databases_empty():
+    client = boto3.client("glue", region_name="us-east-1")
+    response = client.get_databases()
+    response["DatabaseList"].should.have.length_of(0)
+
+
+@mock_glue
+def test_get_databases_several_items():
+    client = boto3.client("glue", region_name="us-east-1")
+    database_name_1, database_name_2 = "firstdatabase", "seconddatabase"
+
+    helpers.create_database(client, database_name_1)
+    helpers.create_database(client, database_name_2)
+
+    database_list = sorted(
+        client.get_databases()["DatabaseList"], key=lambda x: x["Name"]
+    )
+    database_list.should.have.length_of(2)
+    database_list[0].should.equal({"Name": database_name_1})
+    database_list[1].should.equal({"Name": database_name_2})
+
+
 @mock_glue
 def test_create_table():
     client = boto3.client("glue", region_name="us-east-1")
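The helpers.create_database call above is a local test helper; a plausible sketch of what it wraps (the exact helper body is an assumption, but the boto3 Glue API does take a DatabaseInput structure):

def create_database(client, database_name):
    # boto3 Glue client: the database name goes inside DatabaseInput
    return client.create_database(DatabaseInput={"Name": database_name})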
tests/test_s3/test_s3.py
@@ -3256,7 +3256,8 @@ def test_boto3_put_object_tagging_on_earliest_version():
     # Older version has tags while the most recent does not
     resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
     resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
-    resp["TagSet"].should.equal(
+    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
+    sorted_tagset.should.equal(
         [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
     )

@@ -3334,7 +3335,8 @@ def test_boto3_put_object_tagging_on_both_version():

     resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
     resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
-    resp["TagSet"].should.equal(
+    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
+    sorted_tagset.should.equal(
         [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
     )

@@ -3342,7 +3344,8 @@ def test_boto3_put_object_tagging_on_both_version():
         Bucket=bucket_name, Key=key, VersionId=second_object.id
     )
     resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
-    resp["TagSet"].should.equal(
+    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
+    sorted_tagset.should.equal(
         [{"Key": "item1", "Value": "baz"}, {"Key": "item2", "Value": "bin"}]
     )

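All three hunks apply the same fix: S3 does not guarantee TagSet ordering, so the tests now sort by "Key" before comparing. The idiom as a small self-contained sketch:

def sorted_tagset(tag_set):
    # Normalize ordering so equality checks don't depend on server-side order
    return sorted(tag_set, key=lambda tag: tag["Key"])

assert sorted_tagset(
    [{"Key": "item2", "Value": "bar"}, {"Key": "item1", "Value": "foo"}]
) == [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]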
@@ -3744,6 +3747,28 @@ def test_root_dir_with_empty_name_works():
     store_and_read_back_a_key("/")


+@parameterized(["mybucket", "my.bucket"])
+@mock_s3
+def test_leading_slashes_not_removed(bucket_name):
+    """Make sure that leading slashes are not removed internally."""
+    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+    s3.create_bucket(Bucket=bucket_name)
+
+    uploaded_key = "/key"
+    invalid_key_1 = "key"
+    invalid_key_2 = "//key"
+
+    s3.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body")
+
+    with assert_raises(ClientError) as e:
+        s3.get_object(Bucket=bucket_name, Key=invalid_key_1)
+    e.exception.response["Error"]["Code"].should.equal("NoSuchKey")
+
+    with assert_raises(ClientError) as e:
+        s3.get_object(Bucket=bucket_name, Key=invalid_key_2)
+    e.exception.response["Error"]["Code"].should.equal("NoSuchKey")
+
+
 @parameterized(
     [("foo/bar/baz",), ("foo",), ("foo/run_dt%3D2019-01-01%252012%253A30%253A00",)]
 )
@@ -4293,24 +4318,17 @@ def test_s3_config_dict():
         FakeAcl,
         FakeGrant,
         FakeGrantee,
-        FakeTag,
-        FakeTagging,
-        FakeTagSet,
         OWNER,
     )

     # Without any buckets:
     assert not s3_config_query.get_config_resource("some_bucket")

-    tags = FakeTagging(
-        FakeTagSet(
-            [FakeTag("someTag", "someValue"), FakeTag("someOtherTag", "someOtherValue")]
-        )
-    )
+    tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"}

     # With 1 bucket in us-west-2:
     s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")
-    s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags)
+    s3_config_query.backends["global"].put_bucket_tags("bucket1", tags)

     # With a log bucket:
     s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")
tests/test_utilities/test_tagging_service.py
@@ -77,3 +77,34 @@ def test_extract_tag_names():
     expected = ["key1", "key2"]

     expected.should.be.equal(actual)
+
+
+def test_copy_non_existing_arn():
+    svc = TaggingService()
+    tags = [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
+    svc.tag_resource("new_arn", tags)
+
+    # Copying from a non-existing ARN should be a no-op
+    svc.copy_tags("non_existing_arn", "new_arn")
+
+    # Assert the old tags still exist
+    actual = sorted(
+        svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
+    )
+    actual.should.equal(tags)
+
+
+def test_copy_existing_arn():
+    svc = TaggingService()
+    tags_old_arn = [{"Key": "key1", "Value": "value1"}]
+    tags_new_arn = [{"Key": "key2", "Value": "value2"}]
+    svc.tag_resource("old_arn", tags_old_arn)
+    svc.tag_resource("new_arn", tags_new_arn)
+
+    # Copied tags should merge with the tags already on the target ARN
+    svc.copy_tags("old_arn", "new_arn")
+
+    actual = sorted(
+        svc.list_tags_for_resource("new_arn")["Tags"], key=lambda t: t["Key"]
+    )
+    actual.should.equal(
+        [{"Key": "key1", "Value": "value1"}, {"Key": "key2", "Value": "value2"}]
+    )
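The two tests pin down the copy_tags semantics: copying from an unknown ARN is a no-op, and copied tags merge with tags already on the target. A minimal sketch consistent with those tests (illustrative only, not moto's actual TaggingService implementation):

class TaggingServiceSketch:
    def __init__(self):
        self._tags = {}  # arn -> {key: value}

    def tag_resource(self, arn, tags):
        self._tags.setdefault(arn, {}).update(
            {tag["Key"]: tag["Value"] for tag in tags}
        )

    def list_tags_for_resource(self, arn):
        return {
            "Tags": [
                {"Key": key, "Value": value}
                for key, value in self._tags.get(arn, {}).items()
            ]
        }

    def copy_tags(self, from_arn, to_arn):
        # Unknown source ARN: nothing to copy, leave the target untouched
        if from_arn in self._tags:
            self.tag_resource(to_arn, self.list_tags_for_resource(from_arn)["Tags"])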