Merge pull request #2173 from sthuber90/master
Extending IoT Policy and Jobs functionality
commit b252ab6675
@@ -22,6 +22,15 @@ class InvalidRequestException(IoTClientError):
        )


class InvalidStateTransitionException(IoTClientError):
    def __init__(self, msg=None):
        self.code = 409
        super(InvalidStateTransitionException, self).__init__(
            "InvalidStateTransitionException",
            msg or "An attempt was made to change to an invalid state.",
        )


class VersionConflictException(IoTClientError):
    def __init__(self, name):
        self.code = 409
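Both exception types introduced here carry an HTTP 409 status code, which the mock's response layer surfaces to boto3 callers as a botocore ClientError. A tiny sketch (not part of the diff) restating only what the constructors shown above set:

# Sketch: attributes set by the new exception types.
state_exc = InvalidStateTransitionException()
assert state_exc.code == 409  # surfaced as an HTTP 409 error response

conflict_exc = VersionConflictException("my-policy")
assert conflict_exc.code == 409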
@@ -17,6 +17,7 @@ from .exceptions import (
    DeleteConflictException,
    ResourceNotFoundException,
    InvalidRequestException,
    InvalidStateTransitionException,
    VersionConflictException,
)

@@ -29,7 +30,7 @@ class FakeThing(BaseModel):
        self.attributes = attributes
        self.arn = "arn:aws:iot:%s:1:thing/%s" % (self.region_name, thing_name)
        self.version = 1
        # TODO: we need to handle 'version'?
        # TODO: we need to handle "version"?

        # for iot-data
        self.thing_shadow = None
@@ -174,18 +175,19 @@ class FakeCertificate(BaseModel):


class FakePolicy(BaseModel):
    def __init__(self, name, document, region_name):
    def __init__(self, name, document, region_name, default_version_id="1"):
        self.name = name
        self.document = document
        self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, name)
        self.version = "1"  # TODO: handle version
        self.default_version_id = default_version_id
        self.versions = [FakePolicyVersion(self.name, document, True, region_name)]

    def to_get_dict(self):
        return {
            "policyName": self.name,
            "policyArn": self.arn,
            "policyDocument": self.document,
            "defaultVersionId": self.version,
            "defaultVersionId": self.default_version_id,
        }

    def to_dict_at_creation(self):
@@ -193,13 +195,52 @@ class FakePolicy(BaseModel):
            "policyName": self.name,
            "policyArn": self.arn,
            "policyDocument": self.document,
            "policyVersionId": self.version,
            "policyVersionId": self.default_version_id,
        }

    def to_dict(self):
        return {"policyName": self.name, "policyArn": self.arn}


class FakePolicyVersion(object):
    def __init__(self, policy_name, document, is_default, region_name):
        self.name = policy_name
        self.arn = "arn:aws:iot:%s:1:policy/%s" % (region_name, policy_name)
        self.document = document or {}
        self.is_default = is_default
        self.version_id = "1"

        self.create_datetime = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_modified_datetime = time.mktime(datetime(2015, 1, 2).timetuple())

    def to_get_dict(self):
        return {
            "policyName": self.name,
            "policyArn": self.arn,
            "policyDocument": self.document,
            "policyVersionId": self.version_id,
            "isDefaultVersion": self.is_default,
            "creationDate": self.create_datetime,
            "lastModifiedDate": self.last_modified_datetime,
            "generationId": self.version_id,
        }

    def to_dict_at_creation(self):
        return {
            "policyArn": self.arn,
            "policyDocument": self.document,
            "policyVersionId": self.version_id,
            "isDefaultVersion": self.is_default,
        }

    def to_dict(self):
        return {
            "versionId": self.version_id,
            "isDefaultVersion": self.is_default,
            "createDate": self.create_datetime,
        }


class FakeJob(BaseModel):
    JOB_ID_REGEX_PATTERN = "[a-zA-Z0-9_-]"
    JOB_ID_REGEX = re.compile(JOB_ID_REGEX_PATTERN)
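The new FakePolicyVersion model backs the policy-version operations added to the backend further down. A minimal usage sketch (not part of the diff) of the round trip through boto3, assuming moto's mock_iot decorator and an available region; the policy name and documents are illustrative:

import json

import boto3
from moto import mock_iot


@mock_iot
def policy_version_roundtrip_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    doc_v1 = json.dumps({"Version": "2012-10-17", "Statement": []})
    client.create_policy(policyName="my-policy", policyDocument=doc_v1)

    # Adding a second version and promoting it to default is reflected by
    # FakePolicy.default_version_id.
    doc_v2 = json.dumps(
        {"Version": "2012-10-17", "Statement": [{"Effect": "Allow", "Action": "iot:*", "Resource": "*"}]}
    )
    client.create_policy_version(
        policyName="my-policy", policyDocument=doc_v2, setAsDefault=True
    )

    versions = client.list_policy_versions(policyName="my-policy")["policyVersions"]
    assert len(versions) == 2
    assert client.get_policy(policyName="my-policy")["defaultVersionId"] == "2"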
@@ -226,12 +267,14 @@ class FakeJob(BaseModel):
        self.targets = targets
        self.document_source = document_source
        self.document = document
        self.force = False
        self.description = description
        self.presigned_url_config = presigned_url_config
        self.target_selection = target_selection
        self.job_executions_rollout_config = job_executions_rollout_config
        self.status = None  # IN_PROGRESS | CANCELED | COMPLETED
        self.status = "QUEUED"  # IN_PROGRESS | CANCELED | COMPLETED
        self.comment = None
        self.reason_code = None
        self.created_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.completed_at = None
@@ -258,9 +301,11 @@ class FakeJob(BaseModel):
            "jobExecutionsRolloutConfig": self.job_executions_rollout_config,
            "status": self.status,
            "comment": self.comment,
            "forceCanceled": self.force,
            "reasonCode": self.reason_code,
            "createdAt": self.created_at,
            "lastUpdatedAt": self.last_updated_at,
            "completedAt": self.completedAt,
            "completedAt": self.completed_at,
            "jobProcessDetails": self.job_process_details,
            "documentParameters": self.document_parameters,
            "document": self.document,
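With the changes above, a freshly created job starts in the QUEUED state and its serialization no longer references the misspelled completedAt attribute. A short sketch (not part of the diff) of what a caller would observe, assuming mock_iot and that describe_job nests its fields under a "job" key as in the real API; the thing and job names are illustrative:

import boto3
from moto import mock_iot


@mock_iot
def new_job_defaults_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    thing_arn = client.create_thing(thingName="device-1")["thingArn"]
    client.create_job(
        jobId="job-1",
        targets=[thing_arn],
        document='{"field": "value"}',
        description="example job",
    )

    job = client.describe_job(jobId="job-1")["job"]
    assert job["status"] == "QUEUED"      # new default set in FakeJob.__init__
    assert job["forceCanceled"] is False  # surfaced by the describe_job response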
@@ -275,12 +320,67 @@ class FakeJob(BaseModel):
        return regex_match and length_match


class FakeJobExecution(BaseModel):
    def __init__(
        self,
        job_id,
        thing_arn,
        status="QUEUED",
        force_canceled=False,
        status_details_map={},
    ):
        self.job_id = job_id
        self.status = status  # IN_PROGRESS | CANCELED | COMPLETED
        self.force_canceled = force_canceled
        self.status_details_map = status_details_map
        self.thing_arn = thing_arn
        self.queued_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.started_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.last_updated_at = time.mktime(datetime(2015, 1, 1).timetuple())
        self.execution_number = 123
        self.version_number = 123
        self.approximate_seconds_before_time_out = 123

    def to_get_dict(self):
        obj = {
            "jobId": self.job_id,
            "status": self.status,
            "forceCanceled": self.force_canceled,
            "statusDetails": {"detailsMap": self.status_details_map},
            "thingArn": self.thing_arn,
            "queuedAt": self.queued_at,
            "startedAt": self.started_at,
            "lastUpdatedAt": self.last_updated_at,
            "executionNumber": self.execution_number,
            "versionNumber": self.version_number,
            "approximateSecondsBeforeTimedOut": self.approximate_seconds_before_time_out,
        }

        return obj

    def to_dict(self):
        obj = {
            "jobId": self.job_id,
            "thingArn": self.thing_arn,
            "jobExecutionSummary": {
                "status": self.status,
                "queuedAt": self.queued_at,
                "startedAt": self.started_at,
                "lastUpdatedAt": self.last_updated_at,
                "executionNumber": self.execution_number,
            },
        }

        return obj

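Two properties of FakeJobExecution are worth keeping in mind when testing against it: execution numbers, version numbers and timestamps are hard-coded placeholders (123 and fixed 2015 dates), and status_details_map={} is a shared mutable default argument, so callers should pass their own dict rather than rely on the default. A small sketch (not part of the diff) of the two serializations defined above; the ARN is illustrative:

# Sketch: the two dict shapes produced by FakeJobExecution.
execution = FakeJobExecution(
    "job-1", "arn:aws:iot:us-east-1:1:thing/device-1", status_details_map={}
)

summary = execution.to_dict()
assert summary["jobExecutionSummary"]["executionNumber"] == 123

detail = execution.to_get_dict()
assert detail["statusDetails"] == {"detailsMap": {}}
assert detail["forceCanceled"] is False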
class IoTBackend(BaseBackend):
    def __init__(self, region_name=None):
        super(IoTBackend, self).__init__()
        self.region_name = region_name
        self.things = OrderedDict()
        self.jobs = OrderedDict()
        self.job_executions = OrderedDict()
        self.thing_types = OrderedDict()
        self.thing_groups = OrderedDict()
        self.certificates = OrderedDict()
@@ -535,6 +635,28 @@ class IoTBackend(BaseBackend):
        self.policies[policy.name] = policy
        return policy

    def attach_policy(self, policy_name, target):
        principal = self._get_principal(target)
        policy = self.get_policy(policy_name)
        k = (target, policy_name)
        if k in self.principal_policies:
            return
        self.principal_policies[k] = (principal, policy)

    def detach_policy(self, policy_name, target):
        # this may raise ResourceNotFoundException
        self._get_principal(target)
        self.get_policy(policy_name)

        k = (target, policy_name)
        if k not in self.principal_policies:
            raise ResourceNotFoundException()
        del self.principal_policies[k]

    def list_attached_policies(self, target):
        policies = [v[1] for k, v in self.principal_policies.items() if k[0] == target]
        return policies

    def list_policies(self):
        policies = self.policies.values()
        return policies
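attach_policy keys the principal_policies mapping by (target, policy name), so attaching the same policy to the same target twice is a no-op rather than an error, and list_attached_policies simply filters that mapping by target. A usage sketch (not part of the diff), assuming mock_iot and using a certificate created through the mocked API as the target:

import boto3
from moto import mock_iot


@mock_iot
def attach_policy_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    cert_arn = client.create_keys_and_certificate(setAsActive=True)["certificateArn"]
    client.create_policy(policyName="device-policy", policyDocument="{}")

    client.attach_policy(policyName="device-policy", target=cert_arn)
    client.attach_policy(policyName="device-policy", target=cert_arn)  # idempotent

    attached = client.list_attached_policies(target=cert_arn)["policies"]
    assert [p["policyName"] for p in attached] == ["device-policy"]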
@@ -559,6 +681,60 @@ class IoTBackend(BaseBackend):
        policy = self.get_policy(policy_name)
        del self.policies[policy.name]

    def create_policy_version(self, policy_name, policy_document, set_as_default):
        policy = self.get_policy(policy_name)
        if not policy:
            raise ResourceNotFoundException()
        version = FakePolicyVersion(
            policy_name, policy_document, set_as_default, self.region_name
        )
        policy.versions.append(version)
        version.version_id = "{0}".format(len(policy.versions))
        if set_as_default:
            self.set_default_policy_version(policy_name, version.version_id)
        return version

    def set_default_policy_version(self, policy_name, version_id):
        policy = self.get_policy(policy_name)
        if not policy:
            raise ResourceNotFoundException()
        for version in policy.versions:
            if version.version_id == version_id:
                version.is_default = True
                policy.default_version_id = version.version_id
                policy.document = version.document
            else:
                version.is_default = False

    def get_policy_version(self, policy_name, version_id):
        policy = self.get_policy(policy_name)
        if not policy:
            raise ResourceNotFoundException()
        for version in policy.versions:
            if version.version_id == version_id:
                return version
        raise ResourceNotFoundException()

    def list_policy_versions(self, policy_name):
        policy = self.get_policy(policy_name)
        if not policy:
            raise ResourceNotFoundException()
        return policy.versions

    def delete_policy_version(self, policy_name, version_id):
        policy = self.get_policy(policy_name)
        if not policy:
            raise ResourceNotFoundException()
        if version_id == policy.default_version_id:
            raise InvalidRequestException(
                "Cannot delete the default version of a policy"
            )
        for i, v in enumerate(policy.versions):
            if v.version_id == version_id:
                del policy.versions[i]
                return
        raise ResourceNotFoundException()

    def _get_principal(self, principal_arn):
        """
        raise ResourceNotFoundException
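delete_policy_version deliberately refuses to remove whichever version is currently the default, mirroring the real IoT API; the default has to be moved first with set_default_policy_version. A sketch of that guard (not part of the diff), assuming mock_iot and botocore's ClientError:

import boto3
from botocore.exceptions import ClientError
from moto import mock_iot


@mock_iot
def delete_default_version_guard_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    client.create_policy(policyName="p", policyDocument="{}")
    client.create_policy_version(policyName="p", policyDocument="{}", setAsDefault=True)

    # Version "2" is now the default, so deleting it must fail ...
    try:
        client.delete_policy_version(policyName="p", policyVersionId="2")
        raise AssertionError("expected deleting the default version to fail")
    except ClientError:
        pass

    # ... while the non-default version "1" can be removed.
    client.delete_policy_version(policyName="p", policyVersionId="1")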
@@ -574,14 +750,6 @@ class IoTBackend(BaseBackend):
            pass
        raise ResourceNotFoundException()

    def attach_policy(self, policy_name, target):
        principal = self._get_principal(target)
        policy = self.get_policy(policy_name)
        k = (target, policy_name)
        if k in self.principal_policies:
            return
        self.principal_policies[k] = (principal, policy)

    def attach_principal_policy(self, policy_name, principal_arn):
        principal = self._get_principal(principal_arn)
        policy = self.get_policy(policy_name)
@@ -590,15 +758,6 @@
            return
        self.principal_policies[k] = (principal, policy)

    def detach_policy(self, policy_name, target):
        # this may raise ResourceNotFoundException
        self._get_principal(target)
        self.get_policy(policy_name)
        k = (target, policy_name)
        if k not in self.principal_policies:
            raise ResourceNotFoundException()
        del self.principal_policies[k]

    def detach_principal_policy(self, policy_name, principal_arn):
        # this may raise ResourceNotFoundException
        self._get_principal(principal_arn)
@@ -819,11 +978,187 @@ class IoTBackend(BaseBackend):
            self.region_name,
        )
        self.jobs[job_id] = job

        for thing_arn in targets:
            thing_name = thing_arn.split(":")[-1].split("/")[-1]
            job_execution = FakeJobExecution(job_id, thing_arn)
            self.job_executions[(job_id, thing_name)] = job_execution
        return job.job_arn, job_id, description

    def describe_job(self, job_id):
        jobs = [_ for _ in self.jobs.values() if _.job_id == job_id]
        if len(jobs) == 0:
            raise ResourceNotFoundException()
        return jobs[0]

    def delete_job(self, job_id, force):
        job = self.jobs[job_id]

        if job.status == "IN_PROGRESS" and force:
            del self.jobs[job_id]
        elif job.status != "IN_PROGRESS":
            del self.jobs[job_id]
        else:
            raise InvalidStateTransitionException()

    def cancel_job(self, job_id, reason_code, comment, force):
        job = self.jobs[job_id]

        job.reason_code = reason_code if reason_code is not None else job.reason_code
        job.comment = comment if comment is not None else job.comment
        job.force = force if force is not None and force != job.force else job.force
        job.status = "CANCELED"

        if job.status == "IN_PROGRESS" and force:
            self.jobs[job_id] = job
        elif job.status != "IN_PROGRESS":
            self.jobs[job_id] = job
        else:
            raise InvalidStateTransitionException()

        return job

    def get_job_document(self, job_id):
        return self.jobs[job_id]
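create_job derives each thing name from the tail of its target ARN and seeds one FakeJobExecution per target, which is what the execution-listing APIs below operate on. A sketch of the job lifecycle through boto3 (not part of the diff), assuming mock_iot; identifiers are illustrative:

import boto3
from moto import mock_iot


@mock_iot
def job_lifecycle_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    thing_arn = client.create_thing(thingName="device-1")["thingArn"]

    client.create_job(
        jobId="job-1",
        targets=[thing_arn],
        document='{"action": "reboot"}',
        description="reboot fleet",
    )

    # Cancelling records the reason/comment on the FakeJob and flips it to CANCELED.
    client.cancel_job(jobId="job-1", reasonCode="OBSOLETE", comment="superseded")
    assert client.describe_job(jobId="job-1")["job"]["status"] == "CANCELED"

    # Deleting a job that is not IN_PROGRESS needs no force flag.
    client.delete_job(jobId="job-1")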
    def list_jobs(
        self,
        status,
        target_selection,
        max_results,
        token,
        thing_group_name,
        thing_group_id,
    ):
        # TODO: implement filters
        all_jobs = [_.to_dict() for _ in self.jobs.values()]
        filtered_jobs = all_jobs

        if token is None:
            jobs = filtered_jobs[0:max_results]
            next_token = str(max_results) if len(filtered_jobs) > max_results else None
        else:
            token = int(token)
            jobs = filtered_jobs[token : token + max_results]
            next_token = (
                str(token + max_results)
                if len(filtered_jobs) > token + max_results
                else None
            )

        return jobs, next_token
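The pagination scheme used here (and repeated in the job-execution listings below) encodes the next list offset as the token: the first page returns str(max_results) as nextToken when more items remain, and later calls slice from int(token). A condensed, standalone sketch of that logic (not part of the diff):

def paginate_by_offset(items, max_results, token=None):
    """Mirror of the offset-as-token pagination used by list_jobs."""
    start = 0 if token is None else int(token)
    page = items[start : start + max_results]
    next_token = (
        str(start + max_results) if len(items) > start + max_results else None
    )
    return page, next_token


# Example: three items, pages of two.
assert paginate_by_offset(["a", "b", "c"], 2) == (["a", "b"], "2")
assert paginate_by_offset(["a", "b", "c"], 2, "2") == (["c"], None)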
    def describe_job_execution(self, job_id, thing_name, execution_number):
        try:
            job_execution = self.job_executions[(job_id, thing_name)]
        except KeyError:
            raise ResourceNotFoundException()

        if job_execution is None or (
            execution_number is not None
            and job_execution.execution_number != execution_number
        ):
            raise ResourceNotFoundException()

        return job_execution

    def cancel_job_execution(
        self, job_id, thing_name, force, expected_version, status_details
    ):
        job_execution = self.job_executions[(job_id, thing_name)]

        if job_execution is None:
            raise ResourceNotFoundException()

        job_execution.force_canceled = (
            force if force is not None else job_execution.force_canceled
        )
        # TODO: implement expected_version and status_details (at most 10 can be specified)

        if job_execution.status == "IN_PROGRESS" and force:
            job_execution.status = "CANCELED"
            self.job_executions[(job_id, thing_name)] = job_execution
        elif job_execution.status != "IN_PROGRESS":
            job_execution.status = "CANCELED"
            self.job_executions[(job_id, thing_name)] = job_execution
        else:
            raise InvalidStateTransitionException()

    def delete_job_execution(self, job_id, thing_name, execution_number, force):
        job_execution = self.job_executions[(job_id, thing_name)]

        if job_execution.execution_number != execution_number:
            raise ResourceNotFoundException()

        if job_execution.status == "IN_PROGRESS" and force:
            del self.job_executions[(job_id, thing_name)]
        elif job_execution.status != "IN_PROGRESS":
            del self.job_executions[(job_id, thing_name)]
        else:
            raise InvalidStateTransitionException()
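Because FakeJobExecution pins execution_number to 123, describe_job_execution and delete_job_execution only match when the caller either omits executionNumber or passes that placeholder value. A sketch (not part of the diff), assuming mock_iot; names are illustrative:

import boto3
from moto import mock_iot


@mock_iot
def job_execution_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    thing_arn = client.create_thing(thingName="device-1")["thingArn"]
    client.create_job(jobId="job-1", targets=[thing_arn], document="{}")

    execution = client.describe_job_execution(jobId="job-1", thingName="device-1")
    assert execution["execution"]["status"] == "QUEUED"

    # The mock's placeholder execution number must be echoed back on delete.
    client.delete_job_execution(
        jobId="job-1", thingName="device-1", executionNumber=123
    )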
    def list_job_executions_for_job(self, job_id, status, max_results, next_token):
        job_executions = [
            self.job_executions[je].to_dict()
            for je in self.job_executions
            if je[0] == job_id
        ]

        if status is not None:
            job_executions = list(
                filter(
                    lambda elem: status in elem["status"] and elem["status"] == status,
                    job_executions,
                )
            )

        token = next_token
        if token is None:
            job_executions = job_executions[0:max_results]
            next_token = str(max_results) if len(job_executions) > max_results else None
        else:
            token = int(token)
            job_executions = job_executions[token : token + max_results]
            next_token = (
                str(token + max_results)
                if len(job_executions) > token + max_results
                else None
            )

        return job_executions, next_token

    def list_job_executions_for_thing(
        self, thing_name, status, max_results, next_token
    ):
        job_executions = [
            self.job_executions[je].to_dict()
            for je in self.job_executions
            if je[1] == thing_name
        ]

        if status is not None:
            job_executions = list(
                filter(
                    lambda elem: status in elem["status"] and elem["status"] == status,
                    job_executions,
                )
            )

        token = next_token
        if token is None:
            job_executions = job_executions[0:max_results]
            next_token = str(max_results) if len(job_executions) > max_results else None
        else:
            token = int(token)
            job_executions = job_executions[token : token + max_results]
            next_token = (
                str(token + max_results)
                if len(job_executions) > token + max_results
                else None
            )

        return job_executions, next_token


iot_backends = {}
for region in Session().get_available_regions("iot"):
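Both listings key off the (job_id, thing_name) tuples used as job_executions keys, filtering on the first element for a job and on the second for a thing. Note that the optional status filter indexes a top-level "status" key, while FakeJobExecution.to_dict() nests status under jobExecutionSummary, so passing a status filter would likely raise a KeyError as written; the unfiltered path is the one exercised in this sketch (not part of the diff), assuming mock_iot:

import boto3
from moto import mock_iot


@mock_iot
def list_job_executions_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    arns = [
        client.create_thing(thingName=name)["thingArn"]
        for name in ("device-1", "device-2")
    ]
    client.create_job(jobId="job-1", targets=arns, document="{}")

    for_job = client.list_job_executions_for_job(jobId="job-1")["executionSummaries"]
    assert len(for_job) == 2

    for_thing = client.list_job_executions_for_thing(thingName="device-2")
    assert len(for_thing["executionSummaries"]) == 1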
@@ -1,6 +1,7 @@
from __future__ import unicode_literals

import json
from six.moves.urllib.parse import unquote

from moto.core.responses import BaseResponse
from .models import iot_backends
@@ -141,6 +142,8 @@ class IoTResponse(BaseResponse):
                    createdAt=job.created_at,
                    description=job.description,
                    documentParameters=job.document_parameters,
                    forceCanceled=job.force,
                    reasonCode=job.reason_code,
                    jobArn=job.job_arn,
                    jobExecutionsRolloutConfig=job.job_executions_rollout_config,
                    jobId=job.job_id,
@@ -154,6 +157,127 @@ class IoTResponse(BaseResponse):
            )
        )

    def delete_job(self):
        job_id = self._get_param("jobId")
        force = self._get_bool_param("force")

        self.iot_backend.delete_job(job_id=job_id, force=force)

        return json.dumps(dict())

    def cancel_job(self):
        job_id = self._get_param("jobId")
        reason_code = self._get_param("reasonCode")
        comment = self._get_param("comment")
        force = self._get_bool_param("force")

        job = self.iot_backend.cancel_job(
            job_id=job_id, reason_code=reason_code, comment=comment, force=force
        )

        return json.dumps(job.to_dict())

    def get_job_document(self):
        job = self.iot_backend.get_job_document(job_id=self._get_param("jobId"))

        if job.document is not None:
            return json.dumps({"document": job.document})
        else:
            # job.document_source is not None:
            # TODO: needs to be implemented to get document_source's content from S3
            return json.dumps({"document": ""})

    def list_jobs(self):
        status = (self._get_param("status"),)
        target_selection = (self._get_param("targetSelection"),)
        max_results = self._get_int_param(
            "maxResults", 50
        )  # not the default, but makes testing easier
        previous_next_token = self._get_param("nextToken")
        thing_group_name = (self._get_param("thingGroupName"),)
        thing_group_id = self._get_param("thingGroupId")
        jobs, next_token = self.iot_backend.list_jobs(
            status=status,
            target_selection=target_selection,
            max_results=max_results,
            token=previous_next_token,
            thing_group_name=thing_group_name,
            thing_group_id=thing_group_id,
        )

        return json.dumps(dict(jobs=jobs, nextToken=next_token))
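get_job_document only serves documents that were passed inline; when a job was created with documentSource instead, the handler currently falls back to an empty string (fetching the document from S3 is left as a TODO). A sketch of both branches (not part of the diff), assuming mock_iot; the bucket URL is purely illustrative:

import boto3
from moto import mock_iot


@mock_iot
def get_job_document_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    thing_arn = client.create_thing(thingName="device-1")["thingArn"]

    client.create_job(jobId="inline", targets=[thing_arn], document='{"a": 1}')
    assert client.get_job_document(jobId="inline")["document"] == '{"a": 1}'

    client.create_job(
        jobId="from-s3",
        targets=[thing_arn],
        documentSource="https://example-bucket.s3.amazonaws.com/job.json",
    )
    # The S3 fetch is not implemented, so the mock returns an empty document.
    assert client.get_job_document(jobId="from-s3")["document"] == ""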
    def describe_job_execution(self):
        job_id = self._get_param("jobId")
        thing_name = self._get_param("thingName")
        execution_number = self._get_int_param("executionNumber")
        job_execution = self.iot_backend.describe_job_execution(
            job_id=job_id, thing_name=thing_name, execution_number=execution_number
        )

        return json.dumps(dict(execution=job_execution.to_get_dict()))

    def cancel_job_execution(self):
        job_id = self._get_param("jobId")
        thing_name = self._get_param("thingName")
        force = self._get_bool_param("force")
        expected_version = self._get_int_param("expectedVersion")
        status_details = self._get_param("statusDetails")

        self.iot_backend.cancel_job_execution(
            job_id=job_id,
            thing_name=thing_name,
            force=force,
            expected_version=expected_version,
            status_details=status_details,
        )

        return json.dumps(dict())

    def delete_job_execution(self):
        job_id = self._get_param("jobId")
        thing_name = self._get_param("thingName")
        execution_number = self._get_int_param("executionNumber")
        force = self._get_bool_param("force")

        self.iot_backend.delete_job_execution(
            job_id=job_id,
            thing_name=thing_name,
            execution_number=execution_number,
            force=force,
        )

        return json.dumps(dict())

    def list_job_executions_for_job(self):
        job_id = self._get_param("jobId")
        status = self._get_param("status")
        max_results = self._get_int_param(
            "maxResults", 50
        )  # not the default, but makes testing easier
        next_token = self._get_param("nextToken")
        job_executions, next_token = self.iot_backend.list_job_executions_for_job(
            job_id=job_id, status=status, max_results=max_results, next_token=next_token
        )

        return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))

    def list_job_executions_for_thing(self):
        thing_name = self._get_param("thingName")
        status = self._get_param("status")
        max_results = self._get_int_param(
            "maxResults", 50
        )  # not the default, but makes testing easier
        next_token = self._get_param("nextToken")
        job_executions, next_token = self.iot_backend.list_job_executions_for_thing(
            thing_name=thing_name,
            status=status,
            max_results=max_results,
            next_token=next_token,
        )

        return json.dumps(dict(executionSummaries=job_executions, nextToken=next_token))

    def create_keys_and_certificate(self):
        set_as_active = self._get_bool_param("setAsActive")
        cert, key_pair = self.iot_backend.create_keys_and_certificate(
@@ -241,12 +365,61 @@ class IoTResponse(BaseResponse):
        self.iot_backend.delete_policy(policy_name=policy_name)
        return json.dumps(dict())

    def create_policy_version(self):
        policy_name = self._get_param("policyName")
        policy_document = self._get_param("policyDocument")
        set_as_default = self._get_bool_param("setAsDefault")
        policy_version = self.iot_backend.create_policy_version(
            policy_name, policy_document, set_as_default
        )

        return json.dumps(dict(policy_version.to_dict_at_creation()))

    def set_default_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        self.iot_backend.set_default_policy_version(policy_name, version_id)

        return json.dumps(dict())

    def get_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        policy_version = self.iot_backend.get_policy_version(policy_name, version_id)
        return json.dumps(dict(policy_version.to_get_dict()))

    def list_policy_versions(self):
        policy_name = self._get_param("policyName")
        policy_versions = self.iot_backend.list_policy_versions(
            policy_name=policy_name
        )

        return json.dumps(dict(policyVersions=[_.to_dict() for _ in policy_versions]))

    def delete_policy_version(self):
        policy_name = self._get_param("policyName")
        version_id = self._get_param("policyVersionId")
        self.iot_backend.delete_policy_version(policy_name, version_id)

        return json.dumps(dict())

    def attach_policy(self):
        policy_name = self._get_param("policyName")
        target = self._get_param("target")
        self.iot_backend.attach_policy(policy_name=policy_name, target=target)
        return json.dumps(dict())

    def list_attached_policies(self):
        principal = unquote(self._get_param("target"))
        # marker = self._get_param("marker")
        # page_size = self._get_int_param("pageSize")
        policies = self.iot_backend.list_attached_policies(target=principal)
        # TODO: implement pagination in the future
        next_marker = None
        return json.dumps(
            dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker)
        )

    def attach_principal_policy(self):
        policy_name = self._get_param("policyName")
        principal = self.headers.get("x-amzn-iot-principal")
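These handlers translate the FakePolicyVersion serializations straight into the wire format: create_policy_version answers with to_dict_at_creation(), get_policy_version with to_get_dict(), and list_policy_versions wraps the per-version to_dict() summaries. A sketch of the resulting response shape (not part of the diff), assuming mock_iot:

import boto3
from moto import mock_iot


@mock_iot
def policy_version_response_shape_sketch():
    client = boto3.client("iot", region_name="us-east-1")
    client.create_policy(policyName="p", policyDocument="{}")

    created = client.create_policy_version(
        policyName="p", policyDocument="{}", setAsDefault=False
    )
    # Mirrors FakePolicyVersion.to_dict_at_creation():
    assert created["policyVersionId"] == "2"
    assert created["isDefaultVersion"] is False

    version = client.get_policy_version(policyName="p", policyVersionId="1")
    assert version["isDefaultVersion"] is True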
@@ -1,5 +1,5 @@
from __future__ import unicode_literals

template = {
    "Resources": {"VPCEIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}}}
}
@ -1,276 +1,276 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
"Parameters": {
|
||||
"SSHLocation": {
|
||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||
"Description": " The IP address range that can be used to SSH to the EC2 instances",
|
||||
"Default": "0.0.0.0/0",
|
||||
"MinLength": "9",
|
||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||
"MaxLength": "18",
|
||||
"Type": "String",
|
||||
},
|
||||
"KeyName": {
|
||||
"Type": "String",
|
||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
|
||||
"MinLength": "1",
|
||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||
"MaxLength": "255",
|
||||
"ConstraintDescription": "can contain only ASCII characters.",
|
||||
},
|
||||
"InstanceType": {
|
||||
"Default": "m1.small",
|
||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||
"Type": "String",
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"AllowedValues": [
|
||||
"t1.micro",
|
||||
"m1.small",
|
||||
"m1.medium",
|
||||
"m1.large",
|
||||
"m1.xlarge",
|
||||
"m2.xlarge",
|
||||
"m2.2xlarge",
|
||||
"m2.4xlarge",
|
||||
"m3.xlarge",
|
||||
"m3.2xlarge",
|
||||
"c1.medium",
|
||||
"c1.xlarge",
|
||||
"cc1.4xlarge",
|
||||
"cc2.8xlarge",
|
||||
"cg1.4xlarge",
|
||||
],
|
||||
},
|
||||
},
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Outputs": {
|
||||
"URL": {
|
||||
"Description": "Newly created application URL",
|
||||
"Value": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
["http://", {"Fn::GetAtt": ["WebServerInstance", "PublicIp"]}],
|
||||
]
|
||||
},
|
||||
}
|
||||
},
|
||||
"Resources": {
|
||||
"Subnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"CidrBlock": "10.0.0.0/24",
|
||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||
},
|
||||
},
|
||||
"WebServerWaitHandle": {"Type": "AWS::CloudFormation::WaitConditionHandle"},
|
||||
"Route": {
|
||||
"Type": "AWS::EC2::Route",
|
||||
"Properties": {
|
||||
"GatewayId": {"Ref": "InternetGateway"},
|
||||
"DestinationCidrBlock": "0.0.0.0/0",
|
||||
"RouteTableId": {"Ref": "RouteTable"},
|
||||
},
|
||||
"DependsOn": "AttachGateway",
|
||||
},
|
||||
"SubnetRouteTableAssociation": {
|
||||
"Type": "AWS::EC2::SubnetRouteTableAssociation",
|
||||
"Properties": {
|
||||
"SubnetId": {"Ref": "Subnet"},
|
||||
"RouteTableId": {"Ref": "RouteTable"},
|
||||
},
|
||||
},
|
||||
"InternetGateway": {
|
||||
"Type": "AWS::EC2::InternetGateway",
|
||||
"Properties": {
|
||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}]
|
||||
},
|
||||
},
|
||||
"RouteTable": {
|
||||
"Type": "AWS::EC2::RouteTable",
|
||||
"Properties": {
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||
},
|
||||
},
|
||||
"WebServerWaitCondition": {
|
||||
"Type": "AWS::CloudFormation::WaitCondition",
|
||||
"Properties": {"Handle": {"Ref": "WebServerWaitHandle"}, "Timeout": "300"},
|
||||
"DependsOn": "WebServerInstance",
|
||||
},
|
||||
"VPC": {
|
||||
"Type": "AWS::EC2::VPC",
|
||||
"Properties": {
|
||||
"CidrBlock": "10.0.0.0/16",
|
||||
"Tags": [{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"}],
|
||||
},
|
||||
},
|
||||
"InstanceSecurityGroup": {
|
||||
"Type": "AWS::EC2::SecurityGroup",
|
||||
"Properties": {
|
||||
"SecurityGroupIngress": [
|
||||
{
|
||||
"ToPort": "22",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": {"Ref": "SSHLocation"},
|
||||
"FromPort": "22",
|
||||
},
|
||||
{
|
||||
"ToPort": "80",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": "0.0.0.0/0",
|
||||
"FromPort": "80",
|
||||
},
|
||||
],
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"GroupDescription": "Enable SSH access via port 22",
|
||||
},
|
||||
},
|
||||
"WebServerInstance": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"UserData": {
|
||||
"Fn::Base64": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"#!/bin/bash\n",
|
||||
"yum update -y aws-cfn-bootstrap\n",
|
||||
"# Helper function\n",
|
||||
"function error_exit\n",
|
||||
"{\n",
|
||||
' /opt/aws/bin/cfn-signal -e 1 -r "$1" \'',
|
||||
{"Ref": "WebServerWaitHandle"},
|
||||
"'\n",
|
||||
" exit 1\n",
|
||||
"}\n",
|
||||
"# Install the simple web page\n",
|
||||
"/opt/aws/bin/cfn-init -s ",
|
||||
{"Ref": "AWS::StackId"},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{"Ref": "AWS::Region"},
|
||||
" || error_exit 'Failed to run cfn-init'\n",
|
||||
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
|
||||
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
|
||||
"# All done so signal success\n",
|
||||
'/opt/aws/bin/cfn-signal -e 0 -r "WebServer setup complete" \'',
|
||||
{"Ref": "WebServerWaitHandle"},
|
||||
"'\n",
|
||||
],
|
||||
]
|
||||
}
|
||||
},
|
||||
"Tags": [
|
||||
{"Value": {"Ref": "AWS::StackId"}, "Key": "Application"},
|
||||
{"Value": "Bar", "Key": "Foo"},
|
||||
],
|
||||
"SecurityGroupIds": [{"Ref": "InstanceSecurityGroup"}],
|
||||
"KeyName": {"Ref": "KeyName"},
|
||||
"SubnetId": {"Ref": "Subnet"},
|
||||
"ImageId": {
|
||||
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "AMI"]
|
||||
},
|
||||
"InstanceType": {"Ref": "InstanceType"},
|
||||
},
|
||||
"Metadata": {
|
||||
"Comment": "Install a simple PHP application",
|
||||
"AWS::CloudFormation::Init": {
|
||||
"config": {
|
||||
"files": {
|
||||
"/etc/cfn/cfn-hup.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[main]\n",
|
||||
"stack=",
|
||||
{"Ref": "AWS::StackId"},
|
||||
"\n",
|
||||
"region=",
|
||||
{"Ref": "AWS::Region"},
|
||||
"\n",
|
||||
],
|
||||
]
|
||||
},
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "000400",
|
||||
},
|
||||
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[cfn-auto-reloader-hook]\n",
|
||||
"triggers=post.update\n",
|
||||
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
|
||||
"action=/opt/aws/bin/cfn-init -s ",
|
||||
{"Ref": "AWS::StackId"},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{"Ref": "AWS::Region"},
|
||||
"\n",
|
||||
"runas=root\n",
|
||||
],
|
||||
]
|
||||
}
|
||||
},
|
||||
"/var/www/html/index.php": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"<?php\n",
|
||||
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
|
||||
"?>\n",
|
||||
],
|
||||
]
|
||||
},
|
||||
"owner": "apache",
|
||||
"group": "apache",
|
||||
"mode": "000644",
|
||||
},
|
||||
},
|
||||
"services": {
|
||||
"sysvinit": {
|
||||
"httpd": {"ensureRunning": "true", "enabled": "true"},
|
||||
"sendmail": {
|
||||
"ensureRunning": "false",
|
||||
"enabled": "false",
|
||||
},
|
||||
}
|
||||
},
|
||||
"packages": {"yum": {"httpd": [], "php": []}},
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
"IPAddress": {
|
||||
"Type": "AWS::EC2::EIP",
|
||||
"Properties": {"InstanceId": {"Ref": "WebServerInstance"}, "Domain": "vpc"},
|
||||
"DependsOn": "AttachGateway",
|
||||
},
|
||||
"AttachGateway": {
|
||||
"Type": "AWS::EC2::VPCGatewayAttachment",
|
||||
"Properties": {
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"InternetGatewayId": {"Ref": "InternetGateway"},
|
||||
},
|
||||
},
|
||||
},
|
||||
"Mappings": {
|
||||
"RegionMap": {
|
||||
"ap-southeast-1": {"AMI": "ami-74dda626"},
|
||||
"ap-southeast-2": {"AMI": "ami-b3990e89"},
|
||||
"us-west-2": {"AMI": "ami-16fd7026"},
|
||||
"us-east-1": {"AMI": "ami-7f418316"},
|
||||
"ap-northeast-1": {"AMI": "ami-dcfa4edd"},
|
||||
"us-west-1": {"AMI": "ami-951945d0"},
|
||||
"eu-west-1": {"AMI": "ami-24506250"},
|
||||
"sa-east-1": {"AMI": "ami-3e3be423"},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@ -1,117 +1,117 @@
|
||||
import boto
|
||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch_deprecated
|
||||
|
||||
|
||||
def alarm_fixture(name="tester", action=None):
|
||||
action = action or ["arn:alarm"]
|
||||
return MetricAlarm(
|
||||
name=name,
|
||||
namespace="{0}_namespace".format(name),
|
||||
metric="{0}_metric".format(name),
|
||||
comparison=">=",
|
||||
threshold=2.0,
|
||||
period=60,
|
||||
evaluation_periods=5,
|
||||
statistic="Average",
|
||||
description="A test",
|
||||
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
||||
alarm_actions=action,
|
||||
ok_actions=["arn:ok"],
|
||||
insufficient_data_actions=["arn:insufficient"],
|
||||
unit="Seconds",
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_create_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
alarm = alarms[0]
|
||||
alarm.name.should.equal("tester")
|
||||
alarm.namespace.should.equal("tester_namespace")
|
||||
alarm.metric.should.equal("tester_metric")
|
||||
alarm.comparison.should.equal(">=")
|
||||
alarm.threshold.should.equal(2.0)
|
||||
alarm.period.should.equal(60)
|
||||
alarm.evaluation_periods.should.equal(5)
|
||||
alarm.statistic.should.equal("Average")
|
||||
alarm.description.should.equal("A test")
|
||||
dict(alarm.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
||||
list(alarm.alarm_actions).should.equal(["arn:alarm"])
|
||||
list(alarm.ok_actions).should.equal(["arn:ok"])
|
||||
list(alarm.insufficient_data_actions).should.equal(["arn:insufficient"])
|
||||
alarm.unit.should.equal("Seconds")
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_delete_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
|
||||
alarms[0].delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_put_metric_data():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
conn.put_metric_data(
|
||||
namespace="tester",
|
||||
name="metric",
|
||||
value=1.5,
|
||||
dimensions={"InstanceId": ["i-0123456,i-0123457"]},
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric.namespace.should.equal("tester")
|
||||
metric.name.should.equal("metric")
|
||||
dict(metric.dimensions).should.equal({"InstanceId": ["i-0123456,i-0123457"]})
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_describe_alarms():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
|
||||
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
|
||||
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
|
||||
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(4)
|
||||
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
|
||||
alarms.should.have.length_of(2)
|
||||
alarms = conn.describe_alarms(alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
|
||||
alarms.should.have.length_of(3)
|
||||
alarms = conn.describe_alarms(action_prefix="afoo")
|
||||
alarms.should.have.length_of(2)
|
||||
|
||||
for alarm in conn.describe_alarms():
|
||||
alarm.delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
@ -1,182 +1,182 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto.datapipeline
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_datapipeline_deprecated
|
||||
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
|
||||
|
||||
|
||||
def get_value_from_fields(key, fields):
|
||||
for field in fields:
|
||||
if field["key"] == key:
|
||||
return field["stringValue"]
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_create_pipeline():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
|
||||
pipeline_id = res["pipelineId"]
|
||||
pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
|
||||
"pipelineDescriptionList"
|
||||
]
|
||||
pipeline_descriptions.should.have.length_of(1)
|
||||
|
||||
pipeline_description = pipeline_descriptions[0]
|
||||
pipeline_description["name"].should.equal("mypipeline")
|
||||
pipeline_description["pipelineId"].should.equal(pipeline_id)
|
||||
fields = pipeline_description["fields"]
|
||||
|
||||
get_value_from_fields("@pipelineState", fields).should.equal("PENDING")
|
||||
get_value_from_fields("uniqueId", fields).should.equal("some-unique-id")
|
||||
|
||||
|
||||
PIPELINE_OBJECTS = [
|
||||
{
|
||||
"id": "Default",
|
||||
"name": "Default",
|
||||
"fields": [{"key": "workerGroup", "stringValue": "workerGroup"}],
|
||||
},
|
||||
{
|
||||
"id": "Schedule",
|
||||
"name": "Schedule",
|
||||
"fields": [
|
||||
{"key": "startDateTime", "stringValue": "2012-12-12T00:00:00"},
|
||||
{"key": "type", "stringValue": "Schedule"},
|
||||
{"key": "period", "stringValue": "1 hour"},
|
||||
{"key": "endDateTime", "stringValue": "2012-12-21T18:00:00"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "SayHello",
|
||||
"name": "SayHello",
|
||||
"fields": [
|
||||
{"key": "type", "stringValue": "ShellCommandActivity"},
|
||||
{"key": "command", "stringValue": "echo hello"},
|
||||
{"key": "parent", "refValue": "Default"},
|
||||
{"key": "schedule", "refValue": "Schedule"},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_creating_pipeline_definition():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
pipeline_id = res["pipelineId"]
|
||||
|
||||
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
||||
|
||||
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
|
||||
    pipeline_definition["pipelineObjects"].should.have.length_of(3)
    default_object = pipeline_definition["pipelineObjects"][0]
    default_object["name"].should.equal("Default")
    default_object["id"].should.equal("Default")
    default_object["fields"].should.equal(
        [{"key": "workerGroup", "stringValue": "workerGroup"}]
    )


@mock_datapipeline_deprecated
def test_describing_pipeline_objects():
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]

    conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)

    objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
        "pipelineObjects"
    ]

    objects.should.have.length_of(2)
    default_object = [x for x in objects if x["id"] == "Default"][0]
    default_object["name"].should.equal("Default")
    default_object["fields"].should.equal(
        [{"key": "workerGroup", "stringValue": "workerGroup"}]
    )


@mock_datapipeline_deprecated
def test_activate_pipeline():
    conn = boto.datapipeline.connect_to_region("us-west-2")

    res = conn.create_pipeline("mypipeline", "some-unique-id")

    pipeline_id = res["pipelineId"]
    conn.activate_pipeline(pipeline_id)

    pipeline_descriptions = conn.describe_pipelines([pipeline_id])[
        "pipelineDescriptionList"
    ]
    pipeline_descriptions.should.have.length_of(1)
    pipeline_description = pipeline_descriptions[0]
    fields = pipeline_description["fields"]

    get_value_from_fields("@pipelineState", fields).should.equal("SCHEDULED")


@mock_datapipeline_deprecated
def test_delete_pipeline():
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]

    conn.delete_pipeline(pipeline_id)

    response = conn.list_pipelines()

    response["pipelineIdList"].should.have.length_of(0)


@mock_datapipeline_deprecated
def test_listing_pipelines():
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
    res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")

    response = conn.list_pipelines()

    response["hasMoreResults"].should.be(False)
    response["marker"].should.be.none
    response["pipelineIdList"].should.have.length_of(2)
    response["pipelineIdList"].should.contain(
        {"id": res1["pipelineId"], "name": "mypipeline1"}
    )
    response["pipelineIdList"].should.contain(
        {"id": res2["pipelineId"], "name": "mypipeline2"}
    )


@mock_datapipeline_deprecated
def test_listing_paginated_pipelines():
    conn = boto.datapipeline.connect_to_region("us-west-2")
    for i in range(100):
        conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)

    response = conn.list_pipelines()

    response["hasMoreResults"].should.be(True)
    response["marker"].should.equal(response["pipelineIdList"][-1]["id"])
    response["pipelineIdList"].should.have.length_of(50)


# testing a helper function
def test_remove_capitalization_of_dict_keys():
    result = remove_capitalization_of_dict_keys(
        {
            "Id": "IdValue",
            "Fields": [{"Key": "KeyValue", "StringValue": "StringValueValue"}],
        }
    )

    result.should.equal(
        {
            "id": "IdValue",
            "fields": [{"key": "KeyValue", "stringValue": "StringValueValue"}],
        }
    )
@ -1,470 +1,470 @@
from __future__ import unicode_literals

import boto
import sure # noqa
from freezegun import freeze_time

from moto import mock_dynamodb_deprecated

from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
from boto.exception import DynamoDBResponseError


def create_table(conn):
    message_table_schema = conn.create_schema(
        hash_key_name="forum_name",
        hash_key_proto_value=str,
        range_key_name="subject",
        range_key_proto_value=str,
    )

    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )
    return table


@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    conn = boto.connect_dynamodb()
    create_table(conn)

    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"},
                "RangeKeyElement": {"AttributeName": "subject", "AttributeType": "S"},
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)


@mock_dynamodb_deprecated
def test_delete_table():
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)

    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)

    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )


@mock_dynamodb_deprecated
def test_update_table_throughput():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.read_units.should.equal(10)
    table.write_units.should.equal(10)

    table.update_throughput(5, 6)
    table.refresh()

    table.read_units.should.equal(5)
    table.write_units.should.equal(6)


@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()

    table.has_item("LOLCat Forum", "Check this out!").should.equal(True)

    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )

    item["SentBy"] = "User B"
    item.put()

    returned_item = table.get_item(
        hash_key="LOLCat Forum",
        range_key="Check this out!",
        attributes_to_get=["Body", "SentBy"],
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "subject": "Check this out!",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )


@mock_dynamodb_deprecated
def test_item_put_without_table():
    conn = boto.connect_dynamodb()

    conn.layer1.put_item.when.called_with(
        table_name="undeclared-table",
        item=dict(hash_key="LOLCat Forum", range_key="Check this out!"),
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_get_missing_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.get_item.when.called_with(hash_key="tester", range_key="other").should.throw(
        DynamoDBKeyNotFoundError
    )
    table.has_item("foobar", "more").should.equal(False)


@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBKeyNotFoundError)


@mock_dynamodb_deprecated
def test_get_item_without_range_key():
    conn = boto.connect_dynamodb()
    message_table_schema = conn.create_schema(
        hash_key_name="test_hash",
        hash_key_proto_value=int,
        range_key_name="test_range",
        range_key_proto_value=int,
    )
    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )

    hash_key = 3241526475
    range_key = 1234567890987
    new_item = table.new_item(hash_key=hash_key, range_key=range_key)
    new_item.put()

    table.get_item.when.called_with(hash_key=hash_key).should.throw(
        DynamoDBValidationError
    )


@mock_dynamodb_deprecated
def test_delete_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(
        hash_key="LOLCat Forum", range_key="Check this out!", attrs=item_data
    )
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
                "subject": "Check this out!",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table",
        key={"HashKeyElement": {"S": "tester"}, "RangeKeyElement": {"S": "test-range"}},
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_query():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("1"))
    results.response["Items"].should.have.length_of(3)

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("234"))
    results.response["Items"].should.have.length_of(2)

    results = table.query(hash_key="the-key", range_key_condition=condition.GT("9999"))
    results.response["Items"].should.have.length_of(0)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.CONTAINS("12")
    )
    results.response["Items"].should.have.length_of(1)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.BEGINS_WITH("7")
    )
    results.response["Items"].should.have.length_of(1)

    results = table.query(
        hash_key="the-key", range_key_condition=condition.BETWEEN("567", "890")
    )
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.query.when.called_with(
        table_name="undeclared-table",
        hash_key_value={"S": "the-key"},
        range_key_conditions={
            "AttributeValueList": [{"S": "User B"}],
            "ComparisonOperator": "EQ",
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key", range_key="789", attrs=item_data)
    item.put()

    results = table.scan()
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan_after_has_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item(hash_key="the-key", range_key="123")

    list(table.scan()).should.equal([])


@mock_dynamodb_deprecated
def test_write_batch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    batch_list = conn.new_batch_write_list()

    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="123",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )

    items.append(
        table.new_item(
            hash_key="the-key",
            range_key="789",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )

    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(2)

    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key", "789")])
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(1)


@mock_dynamodb_deprecated
def test_batch_read():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", range_key="456", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key", range_key="123", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", range_key="789", attrs=item_data)
    item.put()

    items = table.batch_get_item([("the-key", "123"), ("another-key", "789")])
    # Iterate through so that batch_item gets called
    count = len([x for x in items])
    count.should.equal(2)
@ -1,390 +1,390 @@
from __future__ import unicode_literals

import boto
import sure # noqa
from freezegun import freeze_time

from moto import mock_dynamodb_deprecated

from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError


def create_table(conn):
    message_table_schema = conn.create_schema(
        hash_key_name="forum_name", hash_key_proto_value=str
    )

    table = conn.create_table(
        name="messages", schema=message_table_schema, read_units=10, write_units=10
    )
    return table


@freeze_time("2012-01-14")
@mock_dynamodb_deprecated
def test_create_table():
    conn = boto.connect_dynamodb()
    create_table(conn)

    expected = {
        "Table": {
            "CreationDateTime": 1326499200.0,
            "ItemCount": 0,
            "KeySchema": {
                "HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}
            },
            "ProvisionedThroughput": {
                "ReadCapacityUnits": 10,
                "WriteCapacityUnits": 10,
            },
            "TableName": "messages",
            "TableSizeBytes": 0,
            "TableStatus": "ACTIVE",
        }
    }
    conn.describe_table("messages").should.equal(expected)


@mock_dynamodb_deprecated
def test_delete_table():
    conn = boto.connect_dynamodb()
    create_table(conn)
    conn.list_tables().should.have.length_of(1)

    conn.layer1.delete_table("messages")
    conn.list_tables().should.have.length_of(0)

    conn.layer1.delete_table.when.called_with("messages").should.throw(
        DynamoDBResponseError
    )


@mock_dynamodb_deprecated
def test_update_table_throughput():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.read_units.should.equal(10)
    table.write_units.should.equal(10)

    table.update_throughput(5, 6)
    table.refresh()

    table.read_units.should.equal(5)
    table.write_units.should.equal(6)


@mock_dynamodb_deprecated
def test_item_add_and_describe_and_update():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    returned_item = table.get_item(
        hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User A",
        }
    )

    item["SentBy"] = "User B"
    item.put()

    returned_item = table.get_item(
        hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
    )
    dict(returned_item).should.equal(
        {
            "forum_name": "LOLCat Forum",
            "Body": "http://url_to_lolcat.gif",
            "SentBy": "User B",
        }
    )


@mock_dynamodb_deprecated
def test_item_put_without_table():
    conn = boto.connect_dynamodb()

    conn.layer1.put_item.when.called_with(
        table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_get_missing_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    table.get_item.when.called_with(hash_key="tester").should.throw(
        DynamoDBKeyNotFoundError
    )


@mock_dynamodb_deprecated
def test_get_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.get_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBKeyNotFoundError)


@mock_dynamodb_deprecated
def test_delete_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete()
    response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_attribute_response():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
    item.put()

    table.refresh()
    table.item_count.should.equal(1)

    response = item.delete(return_values="ALL_OLD")
    response.should.equal(
        {
            "Attributes": {
                "Body": "http://url_to_lolcat.gif",
                "forum_name": "LOLCat Forum",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "SentBy": "User A",
            },
            "ConsumedCapacityUnits": 0.5,
        }
    )
    table.refresh()
    table.item_count.should.equal(0)

    item.delete.when.called_with().should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_delete_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_query():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    results = table.query(hash_key="the-key")
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.query.when.called_with(
        table_name="undeclared-table", hash_key_value={"S": "the-key"}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key3", attrs=item_data)
    item.put()

    results = table.scan()
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan_after_has_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item("the-key")

    list(table.scan()).should.equal([])


@mock_dynamodb_deprecated
def test_write_batch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    batch_list = conn.new_batch_write_list()

    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )

    items.append(
        table.new_item(
            hash_key="the-key2",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )

    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(2)

    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key")])
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(1)


@mock_dynamodb_deprecated
def test_batch_read():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key1", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", attrs=item_data)
    item.put()

    items = table.batch_get_item([("the-key1"), ("another-key")])
    # Iterate through so that batch_item gets called
    count = len([x for x in items])
    count.should.have.equal(2)
from __future__ import unicode_literals
|
||||
|
||||
import boto
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
|
||||
from moto import mock_dynamodb_deprecated
|
||||
|
||||
from boto.dynamodb import condition
|
||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
|
||||
from boto.exception import DynamoDBResponseError
|
||||
|
||||
|
||||
def create_table(conn):
|
||||
message_table_schema = conn.create_schema(
|
||||
hash_key_name="forum_name", hash_key_proto_value=str
|
||||
)
|
||||
|
||||
table = conn.create_table(
|
||||
name="messages", schema=message_table_schema, read_units=10, write_units=10
|
||||
)
|
||||
return table
|
||||
|
||||
|
||||
@freeze_time("2012-01-14")
|
||||
@mock_dynamodb_deprecated
|
||||
def test_create_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
|
||||
expected = {
|
||||
"Table": {
|
||||
"CreationDateTime": 1326499200.0,
|
||||
"ItemCount": 0,
|
||||
"KeySchema": {
|
||||
"HashKeyElement": {"AttributeName": "forum_name", "AttributeType": "S"}
|
||||
},
|
||||
"ProvisionedThroughput": {
|
||||
"ReadCapacityUnits": 10,
|
||||
"WriteCapacityUnits": 10,
|
||||
},
|
||||
"TableName": "messages",
|
||||
"TableSizeBytes": 0,
|
||||
"TableStatus": "ACTIVE",
|
||||
}
|
||||
}
|
||||
conn.describe_table("messages").should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
conn.list_tables().should.have.length_of(1)
|
||||
|
||||
conn.layer1.delete_table("messages")
|
||||
conn.list_tables().should.have.length_of(0)
|
||||
|
||||
conn.layer1.delete_table.when.called_with("messages").should.throw(
|
||||
DynamoDBResponseError
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_update_table_throughput():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
table.read_units.should.equal(10)
|
||||
table.write_units.should.equal(10)
|
||||
|
||||
table.update_throughput(5, 6)
|
||||
table.refresh()
|
||||
|
||||
table.read_units.should.equal(5)
|
||||
table.write_units.should.equal(6)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_add_and_describe_and_update():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"SentBy": "User A",
|
||||
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
||||
}
|
||||
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
|
||||
)
|
||||
dict(returned_item).should.equal(
|
||||
{
|
||||
"forum_name": "LOLCat Forum",
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"SentBy": "User A",
|
||||
}
|
||||
)
|
||||
|
||||
item["SentBy"] = "User B"
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key="LOLCat Forum", attributes_to_get=["Body", "SentBy"]
|
||||
)
|
||||
dict(returned_item).should.equal(
|
||||
{
|
||||
"forum_name": "LOLCat Forum",
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"SentBy": "User B",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_put_without_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.put_item.when.called_with(
|
||||
table_name="undeclared-table", item=dict(hash_key="LOLCat Forum")
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_missing_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
table.get_item.when.called_with(hash_key="tester").should.throw(
|
||||
DynamoDBKeyNotFoundError
|
||||
)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_item_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.get_item.when.called_with(
|
||||
table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
|
||||
).should.throw(DynamoDBKeyNotFoundError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"SentBy": "User A",
|
||||
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
||||
}
|
||||
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete()
|
||||
response.should.equal({"Attributes": [], "ConsumedCapacityUnits": 0.5})
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_attribute_response():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"SentBy": "User A",
|
||||
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
||||
}
|
||||
item = table.new_item(hash_key="LOLCat Forum", attrs=item_data)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete(return_values="ALL_OLD")
|
||||
response.should.equal(
|
||||
{
|
||||
"Attributes": {
|
||||
"Body": "http://url_to_lolcat.gif",
|
||||
"forum_name": "LOLCat Forum",
|
||||
"ReceivedTime": "12/9/2011 11:36:03 PM",
|
||||
"SentBy": "User A",
|
||||
},
|
||||
"ConsumedCapacityUnits": 0.5,
|
||||
}
|
||||
)
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.delete_item.when.called_with(
        table_name="undeclared-table", key={"HashKeyElement": {"S": "tester"}}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_query():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    results = table.query(hash_key="the-key")
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_query_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.query.when.called_with(
        table_name="undeclared-table", hash_key_value={"S": "the-key"}
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="the-key3", attrs=item_data)
    item.put()

    results = table.scan()
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"SentBy": condition.EQ("User B")})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Body": condition.BEGINS_WITH("http")})
    results.response["Items"].should.have.length_of(3)

    results = table.scan(scan_filter={"Ids": condition.CONTAINS(2)})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NOT_NULL()})
    results.response["Items"].should.have.length_of(1)

    results = table.scan(scan_filter={"Ids": condition.NULL()})
    results.response["Items"].should.have.length_of(2)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(8, 9)})
    results.response["Items"].should.have.length_of(0)

    results = table.scan(scan_filter={"PK": condition.BETWEEN(5, 8)})
    results.response["Items"].should.have.length_of(1)


@mock_dynamodb_deprecated
def test_scan_with_undeclared_table():
    conn = boto.connect_dynamodb()

    conn.layer1.scan.when.called_with(
        table_name="undeclared-table",
        scan_filter={
            "SentBy": {
                "AttributeValueList": [{"S": "User B"}],
                "ComparisonOperator": "EQ",
            }
        },
    ).should.throw(DynamoDBResponseError)


@mock_dynamodb_deprecated
def test_scan_after_has_item():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    list(table.scan()).should.equal([])

    table.has_item("the-key")

    list(table.scan()).should.equal([])


@mock_dynamodb_deprecated
def test_write_batch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    batch_list = conn.new_batch_write_list()

    items = []
    items.append(
        table.new_item(
            hash_key="the-key",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User A",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
            },
        )
    )

    items.append(
        table.new_item(
            hash_key="the-key2",
            attrs={
                "Body": "http://url_to_lolcat.gif",
                "SentBy": "User B",
                "ReceivedTime": "12/9/2011 11:36:03 PM",
                "Ids": set([1, 2, 3]),
                "PK": 7,
            },
        )
    )

    batch_list.add_batch(table, puts=items)
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(2)

    batch_list = conn.new_batch_write_list()
    batch_list.add_batch(table, deletes=[("the-key")])
    conn.batch_write_item(batch_list)

    table.refresh()
    table.item_count.should.equal(1)


@mock_dynamodb_deprecated
def test_batch_read():
    conn = boto.connect_dynamodb()
    table = create_table(conn)

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User A",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
    }
    item = table.new_item(hash_key="the-key1", attrs=item_data)
    item.put()

    item = table.new_item(hash_key="the-key2", attrs=item_data)
    item.put()

    item_data = {
        "Body": "http://url_to_lolcat.gif",
        "SentBy": "User B",
        "ReceivedTime": "12/9/2011 11:36:03 PM",
        "Ids": set([1, 2, 3]),
        "PK": 7,
    }
    item = table.new_item(hash_key="another-key", attrs=item_data)
    item.put()

    items = table.batch_get_item([("the-key1"), ("another-key")])
    # Iterate through so that batch_item gets called
    count = len([x for x in items])
    count.should.have.equal(2)
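The scan cases above check one `scan_filter` condition at a time. The same checks can be folded into a small helper; this is only a sketch, assuming the `create_table` helper and `condition` module already imported by this test module.

def assert_scan_count(table, expected, scan_filter=None):
    # Run one scan (optionally filtered) and compare the returned item count.
    results = table.scan(scan_filter=scan_filter) if scan_filter else table.scan()
    results.response["Items"].should.have.length_of(expected)


@mock_dynamodb_deprecated
def test_scan_counts_sketch():
    conn = boto.connect_dynamodb()
    table = create_table(conn)
    table.new_item(hash_key="the-key", attrs={"SentBy": "User B", "PK": 7}).put()

    assert_scan_count(table, 1)
    assert_scan_count(table, 1, {"SentBy": condition.EQ("User B")})
    assert_scan_count(table, 0, {"PK": condition.BETWEEN(8, 9)})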
@ -1,37 +1,37 @@
from __future__ import unicode_literals
import boto3
from moto import mock_ec2
import sure  # noqa


@mock_ec2
def test_describe_account_attributes():
    conn = boto3.client("ec2", region_name="us-east-1")
    response = conn.describe_account_attributes()
    expected_attribute_values = [
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-security-groups-per-interface",
        },
        {
            "AttributeValues": [{"AttributeValue": "20"}],
            "AttributeName": "max-instances",
        },
        {
            "AttributeValues": [{"AttributeValue": "EC2"}, {"AttributeValue": "VPC"}],
            "AttributeName": "supported-platforms",
        },
        {
            "AttributeValues": [{"AttributeValue": "none"}],
            "AttributeName": "default-vpc",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "max-elastic-ips",
        },
        {
            "AttributeValues": [{"AttributeValue": "5"}],
            "AttributeName": "vpc-max-elastic-ips",
        },
    ]
    response["AccountAttributes"].should.equal(expected_attribute_values)
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_amazon_dev_pay():
    pass
@ -1 +1 @@
from __future__ import unicode_literals
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_ip_addresses():
    pass
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_monitoring():
    pass
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_placement_groups():
    pass
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_reserved_instances():
    pass
@ -1,96 +1,96 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2_deprecated


@mock_ec2_deprecated
def test_virtual_private_gateways():
    conn = boto.connect_vpc("the_key", "the_secret")

    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
    vpn_gateway.should_not.be.none
    vpn_gateway.id.should.match(r"vgw-\w+")
    vpn_gateway.type.should.equal("ipsec.1")
    vpn_gateway.state.should.equal("available")
    vpn_gateway.availability_zone.should.equal("us-east-1a")


@mock_ec2_deprecated
def test_describe_vpn_gateway():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    vgws = conn.get_all_vpn_gateways()
    vgws.should.have.length_of(1)

    gateway = vgws[0]
    gateway.id.should.match(r"vgw-\w+")
    gateway.id.should.equal(vpn_gateway.id)
    vpn_gateway.type.should.equal("ipsec.1")
    vpn_gateway.state.should.equal("available")
    vpn_gateway.availability_zone.should.equal("us-east-1a")


@mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(1)
    attachments[0].vpc_id.should.equal(vpc.id)
    attachments[0].state.should.equal("attached")


@mock_ec2_deprecated
def test_delete_vpn_gateway():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.delete_vpn_gateway(vpn_gateway.id)
    vgws = conn.get_all_vpn_gateways()
    vgws.should.have.length_of(0)


@mock_ec2_deprecated
def test_vpn_gateway_tagging():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")
    vpn_gateway.add_tag("a key", "some value")

    tag = conn.get_all_tags()[0]
    tag.name.should.equal("a key")
    tag.value.should.equal("some value")

    # Refresh the subnet
    vpn_gateway = conn.get_all_vpn_gateways()[0]
    vpn_gateway.tags.should.have.length_of(1)
    vpn_gateway.tags["a key"].should.equal("some value")


@mock_ec2_deprecated
def test_detach_vpn_gateway():

    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    vpn_gateway = conn.create_vpn_gateway("ipsec.1", "us-east-1a")

    conn.attach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(1)
    attachments[0].vpc_id.should.equal(vpc.id)
    attachments[0].state.should.equal("attached")

    conn.detach_vpn_gateway(vpn_gateway_id=vpn_gateway.id, vpc_id=vpc.id)

    gateway = conn.get_all_vpn_gateways()[0]
    attachments = gateway.attachments
    attachments.should.have.length_of(0)
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_vm_export():
    pass
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_vm_import():
    pass
@ -1,10 +1,10 @@
from __future__ import unicode_literals
import boto
import sure  # noqa

from moto import mock_ec2


@mock_ec2
def test_windows():
    pass
@ -1,21 +1,21 @@
from __future__ import unicode_literals

from tempfile import NamedTemporaryFile
import boto.glacier
import sure  # noqa

from moto import mock_glacier_deprecated


@mock_glacier_deprecated
def test_create_and_delete_archive():
    the_file = NamedTemporaryFile(delete=False)
    the_file.write(b"some stuff")
    the_file.close()

    conn = boto.glacier.connect_to_region("us-west-2")
    vault = conn.create_vault("my_vault")

    archive_id = vault.upload_archive(the_file.name)

    vault.delete_archive(archive_id)
@ -1,31 +1,31 @@
from __future__ import unicode_literals

import boto.glacier
import sure  # noqa

from moto import mock_glacier_deprecated


@mock_glacier_deprecated
def test_create_vault():
    conn = boto.glacier.connect_to_region("us-west-2")

    conn.create_vault("my_vault")

    vaults = conn.list_vaults()
    vaults.should.have.length_of(1)
    vaults[0].name.should.equal("my_vault")


@mock_glacier_deprecated
def test_delete_vault():
    conn = boto.glacier.connect_to_region("us-west-2")

    conn.create_vault("my_vault")

    vaults = conn.list_vaults()
    vaults.should.have.length_of(1)

    conn.delete_vault("my_vault")
    vaults = conn.list_vaults()
    vaults.should.have.length_of(0)
@ -1 +1 @@
from __future__ import unicode_literals
@ -1 +1 @@
from __future__ import unicode_literals
@ -1,97 +1,97 @@
from __future__ import unicode_literals

import copy

from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT


def create_database(client, database_name):
    return client.create_database(DatabaseInput={"Name": database_name})


def get_database(client, database_name):
    return client.get_database(Name=database_name)


def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
    table_input = copy.deepcopy(TABLE_INPUT)
    table_input["Name"] = table_name
    table_input["PartitionKeys"] = partition_keys
    table_input["StorageDescriptor"]["Columns"] = columns
    table_input["StorageDescriptor"][
        "Location"
    ] = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )
    return table_input


def create_table(client, database_name, table_name, table_input=None, **kwargs):
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)

    return client.create_table(DatabaseName=database_name, TableInput=table_input)


def update_table(client, database_name, table_name, table_input=None, **kwargs):
    if table_input is None:
        table_input = create_table_input(database_name, table_name, **kwargs)

    return client.update_table(DatabaseName=database_name, TableInput=table_input)


def get_table(client, database_name, table_name):
    return client.get_table(DatabaseName=database_name, Name=table_name)


def get_tables(client, database_name):
    return client.get_tables(DatabaseName=database_name)


def get_table_versions(client, database_name, table_name):
    return client.get_table_versions(DatabaseName=database_name, TableName=table_name)


def get_table_version(client, database_name, table_name, version_id):
    return client.get_table_version(
        DatabaseName=database_name, TableName=table_name, VersionId=version_id
    )


def create_partition_input(database_name, table_name, values=[], columns=[]):
    root_path = "s3://my-bucket/{database_name}/{table_name}".format(
        database_name=database_name, table_name=table_name
    )

    part_input = copy.deepcopy(PARTITION_INPUT)
    part_input["Values"] = values
    part_input["StorageDescriptor"]["Columns"] = columns
    part_input["StorageDescriptor"]["SerdeInfo"]["Parameters"]["path"] = root_path
    return part_input


def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.create_partition(
        DatabaseName=database_name, TableName=table_name, PartitionInput=partiton_input
    )


def update_partition(
    client, database_name, table_name, old_values=[], partiton_input=None, **kwargs
):
    if partiton_input is None:
        partiton_input = create_partition_input(database_name, table_name, **kwargs)
    return client.update_partition(
        DatabaseName=database_name,
        TableName=table_name,
        PartitionInput=partiton_input,
        PartitionValueList=old_values,
    )


def get_partition(client, database_name, table_name, values):
    return client.get_partition(
        DatabaseName=database_name, TableName=table_name, PartitionValues=values
    )
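A test module typically composes these helpers as sketched below; the sketch assumes that module imports `boto3` and moto's `mock_glue` decorator (this helpers file itself imports neither), and that the table/database names are placeholders.

# Sketch of typical usage from a test module; assumes:
#   import boto3
#   from moto import mock_glue
@mock_glue
def test_datacatalog_helpers_sketch():
    client = boto3.client("glue", region_name="us-east-1")
    database_name = "myspecialdatabase"
    table_name = "myfirsttable"

    create_database(client, database_name)
    create_table(client, database_name, table_name)

    response = get_table(client, database_name, table_name)
    assert response["Table"]["Name"] == table_name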
@ -9,6 +9,173 @@ from botocore.exceptions import ClientError
from nose.tools import assert_raises


@mock_iot
def test_attach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")


@mock_iot
def test_detach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")

    client.detach_policy(policyName=policy_name, target=cert_arn)
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.be.empty


@mock_iot
def test_list_attached_policies():
    client = boto3.client("iot", region_name="ap-northeast-1")
    cert = client.create_keys_and_certificate(setAsActive=True)
    policies = client.list_attached_policies(target=cert["certificateArn"])
    policies["policies"].should.be.empty


@mock_iot
def test_policy_versions():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    policy = client.create_policy(policyName=policy_name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("policyVersionId").which.should.equal("1")

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy["defaultVersionId"]
    )

    policy1 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    policy1.should.have.key("policyArn").which.should_not.be.none
    policy1.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy1.should.have.key("policyVersionId").which.should.equal("2")
    policy1.should.have.key("isDefaultVersion").which.should.equal(True)

    policy2 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_2"}),
        setAsDefault=False,
    )
    policy2.should.have.key("policyArn").which.should_not.be.none
    policy2.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy2.should.have.key("policyVersionId").which.should.equal("3")
    policy2.should.have.key("isDefaultVersion").which.should.equal(False)

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy1["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    client.set_default_policy_version(
        policyName=policy_name, policyVersionId=policy2["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy2["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy2["policyVersionId"]
    )

    client.delete_policy_version(policyName=policy_name, policyVersionId="1")
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy1["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)

    # should fail as it's the default policy. Should use delete_policy instead
    try:
        client.delete_policy_version(
            policyName=policy_name, policyVersionId=policy2["policyVersionId"]
        )
        assert False, "Should have failed in previous call"
    except Exception as exception:
        exception.response["Error"]["Message"].should.equal(
            "Cannot delete the default version of a policy"
        )


@mock_iot
def test_things():
    client = boto3.client("iot", region_name="ap-northeast-1")
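The `test_policy_versions` case above drives the whole version lifecycle with `sure` assertions; condensed to plain `assert`s, the same mocked calls look roughly like this sketch (assuming the module's existing `boto3`, `json`, and `mock_iot` imports).

@mock_iot
def test_policy_version_lifecycle_sketch():
    client = boto3.client("iot", region_name="ap-northeast-1")
    client.create_policy(policyName="my-policy", policyDocument="{}")

    # Creating a version with setAsDefault=True promotes it immediately.
    v2 = client.create_policy_version(
        policyName="my-policy",
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    assert v2["isDefaultVersion"] is True

    # Non-default versions can be deleted; only the default one is protected.
    client.delete_policy_version(policyName="my-policy", policyVersionId="1")
    versions = client.list_policy_versions(policyName="my-policy")["policyVersions"]
    assert len(versions) == 1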
@ -994,7 +1161,10 @@ def test_create_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    # thing# job document
    # job_document = {
    #     "field": "value"
    # }
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")
@ -1020,6 +1190,63 @@ def test_create_job():
    job.should.have.key("description")


@mock_iot
def test_list_jobs():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing# job document
    # job_document = {
    #     "field": "value"
    # }
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job1 = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job1.should.have.key("jobId").which.should.equal(job_id)
    job1.should.have.key("jobArn")
    job1.should.have.key("description")

    job2 = client.create_job(
        jobId=job_id + "1",
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job2.should.have.key("jobId").which.should.equal(job_id + "1")
    job2.should.have.key("jobArn")
    job2.should.have.key("description")

    jobs = client.list_jobs()
    jobs.should.have.key("jobs")
    jobs.should_not.have.key("nextToken")
    jobs["jobs"][0].should.have.key("jobId").which.should.equal(job_id)
    jobs["jobs"][1].should.have.key("jobId").which.should.equal(job_id + "1")


@mock_iot
def test_describe_job():
    client = boto3.client("iot", region_name="eu-west-1")
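`list_jobs` above returns everything in one page under moto (no `nextToken`); against real AWS the same call may paginate, so a consumer would loop roughly like this sketch.

def list_all_jobs(client):
    # Accumulate jobs across pages; stops when no nextToken is returned.
    jobs, token = [], None
    while True:
        kwargs = {"nextToken": token} if token else {}
        resp = client.list_jobs(**kwargs)
        jobs.extend(resp["jobs"])
        token = resp.get("nextToken")
        if not token:
            return jobs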
@ -1124,3 +1351,387 @@ def test_describe_job_1():
    job.should.have.key("job").which.should.have.key(
        "jobExecutionsRolloutConfig"
    ).which.should.have.key("maximumPerMinute").which.should.equal(10)


@mock_iot
def test_delete_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)

    client.delete_job(jobId=job_id)

    client.list_jobs()["jobs"].should.have.length_of(0)


@mock_iot
def test_cancel_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)

    job = client.cancel_job(jobId=job_id, reasonCode="Because", comment="You are")
    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job = client.describe_job(jobId=job_id)
    job.should.have.key("job")
    job.should.have.key("job").which.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("job").which.should.have.key("status").which.should.equal(
        "CANCELED"
    )
    job.should.have.key("job").which.should.have.key(
        "forceCanceled"
    ).which.should.equal(False)
    job.should.have.key("job").which.should.have.key("reasonCode").which.should.equal(
        "Because"
    )
    job.should.have.key("job").which.should.have.key("comment").which.should.equal(
        "You are"
    )


@mock_iot
def test_get_job_document_with_document_source():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        documentSource="https://s3-eu-west-1.amazonaws.com/bucket-name/job_document.json",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal("")


@mock_iot
def test_get_job_document_with_document():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")

    job_document = client.get_job_document(jobId=job_id)
    job_document.should.have.key("document").which.should.equal('{"field": "value"}')


@mock_iot
def test_describe_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)

    job_execution = client.describe_job_execution(
        jobId=job_id, thingName=name, executionNumber=123
    )
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("jobId").which.should.equal(job_id)
    job_execution["execution"].should.have.key("status").which.should.equal("QUEUED")
    job_execution["execution"].should.have.key("forceCanceled").which.should.equal(
        False
    )
    job_execution["execution"].should.have.key("statusDetails").which.should.equal(
        {"detailsMap": {}}
    )
    job_execution["execution"].should.have.key("thingArn").which.should.equal(
        thing["thingArn"]
    )
    job_execution["execution"].should.have.key("queuedAt")
    job_execution["execution"].should.have.key("startedAt")
    job_execution["execution"].should.have.key("lastUpdatedAt")
    job_execution["execution"].should.have.key("executionNumber").which.should.equal(
        123
    )
    job_execution["execution"].should.have.key("versionNumber").which.should.equal(123)
    job_execution["execution"].should.have.key(
        "approximateSecondsBeforeTimedOut"
    ).which.should.equal(123)

    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=456)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")


@mock_iot
def test_cancel_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    client.cancel_job_execution(jobId=job_id, thingName=name)
    job_execution = client.describe_job_execution(jobId=job_id, thingName=name)
    job_execution.should.have.key("execution")
    job_execution["execution"].should.have.key("status").which.should.equal("CANCELED")


@mock_iot
def test_delete_job_execution():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    client.delete_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    try:
        client.describe_job_execution(jobId=job_id, thingName=name, executionNumber=123)
    except ClientError as exc:
        error_code = exc.response["Error"]["Code"]
        error_code.should.equal("ResourceNotFoundException")
    else:
        raise Exception("Should have raised error")


@mock_iot
def test_list_job_executions_for_job():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.list_job_executions_for_job(jobId=job_id)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key(
        "thingArn"
    ).which.should.equal(thing["thingArn"])


@mock_iot
def test_list_job_executions_for_thing():
    client = boto3.client("iot", region_name="eu-west-1")
    name = "my-thing"
    job_id = "TestJob"
    # thing
    thing = client.create_thing(thingName=name)
    thing.should.have.key("thingName").which.should.equal(name)
    thing.should.have.key("thingArn")

    # job document
    job_document = {"field": "value"}

    job = client.create_job(
        jobId=job_id,
        targets=[thing["thingArn"]],
        document=json.dumps(job_document),
        description="Description",
        presignedUrlConfig={
            "roleArn": "arn:aws:iam::1:role/service-role/iot_job_role",
            "expiresInSec": 123,
        },
        targetSelection="CONTINUOUS",
        jobExecutionsRolloutConfig={"maximumPerMinute": 10},
    )

    job.should.have.key("jobId").which.should.equal(job_id)
    job.should.have.key("jobArn")
    job.should.have.key("description")

    job_execution = client.list_job_executions_for_thing(thingName=name)
    job_execution.should.have.key("executionSummaries")
    job_execution["executionSummaries"][0].should.have.key("jobId").which.should.equal(
        job_id
    )
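Taken together, the execution tests above cover a per-thing lifecycle that, condensed to plain `assert`s, looks roughly like this sketch; it leans on the mocked defaults asserted above (such as execution number 123) and on the module's existing imports.

@mock_iot
def test_job_execution_lifecycle_sketch():
    client = boto3.client("iot", region_name="eu-west-1")
    thing = client.create_thing(thingName="my-thing")
    client.create_job(
        jobId="TestJob",
        targets=[thing["thingArn"]],
        document=json.dumps({"field": "value"}),
    )

    # Each targeted thing starts with a QUEUED execution.
    execution = client.describe_job_execution(jobId="TestJob", thingName="my-thing")
    assert execution["execution"]["status"] == "QUEUED"

    client.cancel_job_execution(jobId="TestJob", thingName="my-thing")
    client.delete_job_execution(
        jobId="TestJob", thingName="my-thing", executionNumber=123
    )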
@ -1,76 +1,76 @@
from __future__ import unicode_literals
import boto3
from freezegun import freeze_time
import sure  # noqa
import re

from moto import mock_opsworks


@freeze_time("2015-01-01")
@mock_opsworks
def test_create_app_response():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_app(StackId=stack_id, Type="other", Name="TestApp")

    response.should.contain("AppId")

    second_stack_id = client.create_stack(
        Name="test_stack_2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]

    response = client.create_app(StackId=second_stack_id, Type="other", Name="TestApp")

    response.should.contain("AppId")

    # ClientError
    client.create_app.when.called_with(
        StackId=stack_id, Type="other", Name="TestApp"
    ).should.throw(Exception, re.compile(r'already an app named "TestApp"'))

    # ClientError
    client.create_app.when.called_with(
        StackId="nothere", Type="other", Name="TestApp"
    ).should.throw(Exception, "nothere")


@freeze_time("2015-01-01")
@mock_opsworks
def test_describe_apps():
    client = boto3.client("opsworks", region_name="us-east-1")
    stack_id = client.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )["StackId"]
    app_id = client.create_app(StackId=stack_id, Type="other", Name="TestApp")["AppId"]

    rv1 = client.describe_apps(StackId=stack_id)
    rv2 = client.describe_apps(AppIds=[app_id])
    rv1["Apps"].should.equal(rv2["Apps"])

    rv1["Apps"][0]["Name"].should.equal("TestApp")

    # ClientError
    client.describe_apps.when.called_with(
        StackId=stack_id, AppIds=[app_id]
    ).should.throw(Exception, "Please provide one or more app IDs or a stack ID")
    # ClientError
    client.describe_apps.when.called_with(StackId="nothere").should.throw(
        Exception, "Unable to find stack with ID nothere"
    )
    # ClientError
    client.describe_apps.when.called_with(AppIds=["nothere"]).should.throw(
        Exception, "nothere"
    )
@ -1,206 +1,206 @@
|
||||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_opsworks
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_opsworks
|
||||
def test_create_instance():
|
||||
client = boto3.client("opsworks", region_name="us-east-1")
|
||||
stack_id = client.create_stack(
|
||||
Name="test_stack_1",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
|
||||
layer_id = client.create_layer(
|
||||
StackId=stack_id,
|
||||
Type="custom",
|
||||
Name="TestLayer",
|
||||
Shortname="TestLayerShortName",
|
||||
)["LayerId"]
|
||||
|
||||
second_stack_id = client.create_stack(
|
||||
Name="test_stack_2",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
|
||||
second_layer_id = client.create_layer(
|
||||
StackId=second_stack_id,
|
||||
Type="custom",
|
||||
Name="SecondTestLayer",
|
||||
Shortname="SecondTestLayerShortName",
|
||||
)["LayerId"]
|
||||
|
||||
response = client.create_instance(
|
||||
StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
|
||||
)
|
||||
|
||||
response.should.contain("InstanceId")
|
||||
|
||||
client.create_instance.when.called_with(
|
||||
StackId="nothere", LayerIds=[layer_id], InstanceType="t2.micro"
|
||||
).should.throw(Exception, "Unable to find stack with ID nothere")
|
||||
|
||||
client.create_instance.when.called_with(
|
||||
StackId=stack_id, LayerIds=["nothere"], InstanceType="t2.micro"
|
||||
).should.throw(Exception, "nothere")
|
||||
# ClientError
|
||||
client.create_instance.when.called_with(
|
||||
StackId=stack_id, LayerIds=[second_layer_id], InstanceType="t2.micro"
|
||||
).should.throw(Exception, "Please only provide layer IDs from the same stack")
|
||||
# ClientError
|
||||
client.start_instance.when.called_with(InstanceId="nothere").should.throw(
|
||||
Exception, "Unable to find instance with ID nothere"
|
||||
)
|
||||
|
||||
|
||||
@mock_opsworks
|
||||
def test_describe_instances():
|
||||
"""
|
||||
create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2)
|
||||
|
||||
populate S1L1 with 2 instances (S1L1_i1, S1L1_i2)
|
||||
populate S2L1 with 1 instance (S2L1_i1)
|
||||
populate S2L2 with 2 instances (S2L2_i1, S2L2_i2)
|
||||
"""
|
||||
|
||||
client = boto3.client("opsworks", region_name="us-east-1")
|
||||
S1 = client.create_stack(
|
||||
Name="S1",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
S1L1 = client.create_layer(
|
||||
StackId=S1, Type="custom", Name="S1L1", Shortname="S1L1"
|
||||
)["LayerId"]
|
||||
S2 = client.create_stack(
|
||||
Name="S2",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
S2L1 = client.create_layer(
|
||||
StackId=S2, Type="custom", Name="S2L1", Shortname="S2L1"
|
||||
)["LayerId"]
|
||||
S2L2 = client.create_layer(
|
||||
StackId=S2, Type="custom", Name="S2L2", Shortname="S2L2"
|
||||
)["LayerId"]
|
||||
|
||||
S1L1_i1 = client.create_instance(
|
||||
StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
|
||||
)["InstanceId"]
|
||||
S1L1_i2 = client.create_instance(
|
||||
StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
|
||||
)["InstanceId"]
|
||||
S2L1_i1 = client.create_instance(
|
||||
StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro"
|
||||
)["InstanceId"]
|
||||
S2L2_i1 = client.create_instance(
|
||||
StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
|
||||
)["InstanceId"]
|
||||
S2L2_i2 = client.create_instance(
|
||||
StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
|
||||
)["InstanceId"]
|
||||
|
||||
# instances in Stack 1
|
||||
response = client.describe_instances(StackId=S1)["Instances"]
|
||||
response.should.have.length_of(2)
|
||||
S1L1_i1.should.be.within([i["InstanceId"] for i in response])
|
||||
S1L1_i2.should.be.within([i["InstanceId"] for i in response])
|
||||
|
||||
response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])["Instances"]
|
||||
sorted(response2, key=lambda d: d["InstanceId"]).should.equal(
|
||||
sorted(response, key=lambda d: d["InstanceId"])
|
||||
)
|
||||
|
||||
response3 = client.describe_instances(LayerId=S1L1)["Instances"]
|
||||
sorted(response3, key=lambda d: d["InstanceId"]).should.equal(
|
||||
sorted(response, key=lambda d: d["InstanceId"])
|
||||
)
|
||||
|
||||
response = client.describe_instances(StackId=S1)["Instances"]
|
||||
response.should.have.length_of(2)
|
||||
S1L1_i1.should.be.within([i["InstanceId"] for i in response])
|
||||
S1L1_i2.should.be.within([i["InstanceId"] for i in response])
|
||||
|
||||
# instances in Stack 2
|
||||
response = client.describe_instances(StackId=S2)["Instances"]
|
||||
response.should.have.length_of(3)
|
||||
S2L1_i1.should.be.within([i["InstanceId"] for i in response])
|
||||
S2L2_i1.should.be.within([i["InstanceId"] for i in response])
|
||||
S2L2_i2.should.be.within([i["InstanceId"] for i in response])
|
||||
|
||||
response = client.describe_instances(LayerId=S2L1)["Instances"]
|
||||
response.should.have.length_of(1)
|
||||
S2L1_i1.should.be.within([i["InstanceId"] for i in response])
|
||||
|
||||
response = client.describe_instances(LayerId=S2L2)["Instances"]
|
||||
response.should.have.length_of(2)
|
||||
S2L1_i1.should_not.be.within([i["InstanceId"] for i in response])
|
||||
|
||||
# ClientError
|
||||
client.describe_instances.when.called_with(StackId=S1, LayerId=S1L1).should.throw(
|
||||
Exception, "Please provide either one or more"
|
||||
)
|
||||
# ClientError
|
||||
client.describe_instances.when.called_with(StackId="nothere").should.throw(
|
||||
Exception, "nothere"
|
||||
)
|
||||
# ClientError
|
||||
client.describe_instances.when.called_with(LayerId="nothere").should.throw(
|
||||
Exception, "nothere"
|
||||
)
|
||||
# ClientError
|
||||
client.describe_instances.when.called_with(InstanceIds=["nothere"]).should.throw(
|
||||
Exception, "nothere"
|
||||
)
|
||||
|
||||
|
||||
@mock_opsworks
|
||||
@mock_ec2
|
||||
def test_ec2_integration():
|
||||
"""
|
||||
instances created via OpsWorks should be discoverable via ec2
|
||||
"""
|
||||
|
||||
opsworks = boto3.client("opsworks", region_name="us-east-1")
|
||||
stack_id = opsworks.create_stack(
|
||||
Name="S1",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
|
||||
layer_id = opsworks.create_layer(
|
||||
StackId=stack_id, Type="custom", Name="S1L1", Shortname="S1L1"
|
||||
)["LayerId"]
|
||||
|
||||
instance_id = opsworks.create_instance(
|
||||
StackId=stack_id,
|
||||
LayerIds=[layer_id],
|
||||
InstanceType="t2.micro",
|
||||
SshKeyName="testSSH",
|
||||
)["InstanceId"]
|
||||
|
||||
ec2 = boto3.client("ec2", region_name="us-east-1")
|
||||
|
||||
# Before starting the instance, it shouldn't be discoverable via ec2
|
||||
reservations = ec2.describe_instances()["Reservations"]
|
||||
reservations.should.be.empty
|
||||
|
||||
# After starting the instance, it should be discoverable via ec2
|
||||
opsworks.start_instance(InstanceId=instance_id)
|
||||
reservations = ec2.describe_instances()["Reservations"]
|
||||
reservations[0]["Instances"].should.have.length_of(1)
|
||||
instance = reservations[0]["Instances"][0]
|
||||
opsworks_instance = opsworks.describe_instances(StackId=stack_id)["Instances"][0]
|
||||
|
||||
instance["InstanceId"].should.equal(opsworks_instance["Ec2InstanceId"])
|
||||
instance["PrivateIpAddress"].should.equal(opsworks_instance["PrivateIp"])
|
||||
|
@ -1,96 +1,96 @@
|
||||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
from freezegun import freeze_time
|
||||
import sure # noqa
|
||||
import re
|
||||
|
||||
from moto import mock_opsworks
|
||||
|
||||
|
||||
@freeze_time("2015-01-01")
|
||||
@mock_opsworks
|
||||
def test_create_layer_response():
|
||||
client = boto3.client("opsworks", region_name="us-east-1")
|
||||
stack_id = client.create_stack(
|
||||
Name="test_stack_1",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
|
||||
response = client.create_layer(
|
||||
StackId=stack_id,
|
||||
Type="custom",
|
||||
Name="TestLayer",
|
||||
Shortname="TestLayerShortName",
|
||||
)
|
||||
|
||||
response.should.contain("LayerId")
|
||||
|
||||
second_stack_id = client.create_stack(
|
||||
Name="test_stack_2",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
|
||||
response = client.create_layer(
|
||||
StackId=second_stack_id,
|
||||
Type="custom",
|
||||
Name="TestLayer",
|
||||
Shortname="TestLayerShortName",
|
||||
)
|
||||
|
||||
response.should.contain("LayerId")
|
||||
|
||||
# ClientError
|
||||
client.create_layer.when.called_with(
|
||||
StackId=stack_id, Type="custom", Name="TestLayer", Shortname="_"
|
||||
).should.throw(Exception, re.compile(r'already a layer named "TestLayer"'))
|
||||
# ClientError
|
||||
client.create_layer.when.called_with(
|
||||
StackId=stack_id, Type="custom", Name="_", Shortname="TestLayerShortName"
|
||||
).should.throw(
|
||||
Exception, re.compile(r'already a layer with shortname "TestLayerShortName"')
|
||||
)
|
||||
# ClientError
|
||||
client.create_layer.when.called_with(
|
||||
StackId="nothere", Type="custom", Name="TestLayer", Shortname="_"
|
||||
).should.throw(Exception, "nothere")
|
||||
|
||||
|
||||
@freeze_time("2015-01-01")
|
||||
@mock_opsworks
|
||||
def test_describe_layers():
|
||||
client = boto3.client("opsworks", region_name="us-east-1")
|
||||
stack_id = client.create_stack(
|
||||
Name="test_stack_1",
|
||||
Region="us-east-1",
|
||||
ServiceRoleArn="service_arn",
|
||||
DefaultInstanceProfileArn="profile_arn",
|
||||
)["StackId"]
|
||||
layer_id = client.create_layer(
|
||||
StackId=stack_id,
|
||||
Type="custom",
|
||||
Name="TestLayer",
|
||||
Shortname="TestLayerShortName",
|
||||
)["LayerId"]
|
||||
|
||||
rv1 = client.describe_layers(StackId=stack_id)
|
||||
rv2 = client.describe_layers(LayerIds=[layer_id])
|
||||
rv1["Layers"].should.equal(rv2["Layers"])
|
||||
|
||||
rv1["Layers"][0]["Name"].should.equal("TestLayer")
|
||||
|
||||
# ClientError
|
||||
client.describe_layers.when.called_with(
|
||||
StackId=stack_id, LayerIds=[layer_id]
|
||||
).should.throw(Exception, "Please provide one or more layer IDs or a stack ID")
|
||||
# ClientError
|
||||
client.describe_layers.when.called_with(StackId="nothere").should.throw(
|
||||
Exception, "Unable to find stack with ID nothere"
|
||||
)
|
||||
# ClientError
|
||||
client.describe_layers.when.called_with(LayerIds=["nothere"]).should.throw(
|
||||
Exception, "nothere"
|
||||
)
|
||||
|
@ -1,263 +1,263 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from moto import mock_polly
|
||||
|
||||
# Polly only available in a few regions
|
||||
DEFAULT_REGION = "eu-west-1"
|
||||
|
||||
LEXICON_XML = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<lexicon version="1.0"
|
||||
xmlns="http://www.w3.org/2005/01/pronunciation-lexicon"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.w3.org/2005/01/pronunciation-lexicon
|
||||
http://www.w3.org/TR/2007/CR-pronunciation-lexicon-20071212/pls.xsd"
|
||||
alphabet="ipa"
|
||||
xml:lang="en-US">
|
||||
<lexeme>
|
||||
<grapheme>W3C</grapheme>
|
||||
<alias>World Wide Web Consortium</alias>
|
||||
</lexeme>
|
||||
</lexicon>"""
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_describe_voices():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
|
||||
resp = client.describe_voices()
|
||||
len(resp["Voices"]).should.be.greater_than(1)
|
||||
|
||||
resp = client.describe_voices(LanguageCode="en-GB")
|
||||
len(resp["Voices"]).should.equal(3)
|
||||
|
||||
try:
|
||||
client.describe_voices(LanguageCode="SOME_LANGUAGE")
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("400")
|
||||
else:
|
||||
raise RuntimeError("Should of raised an exception")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_put_list_lexicon():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
|
||||
# Return nothing
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
resp = client.list_lexicons()
|
||||
len(resp["Lexicons"]).should.equal(1)
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_put_get_lexicon():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
|
||||
# Return nothing
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
resp = client.get_lexicon(Name="test")
|
||||
resp.should.contain("Lexicon")
|
||||
resp.should.contain("LexiconAttributes")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_put_lexicon_bad_name():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
|
||||
try:
|
||||
client.put_lexicon(Name="test-invalid", Content=LEXICON_XML)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
else:
|
||||
raise RuntimeError("Should of raised an exception")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
|
||||
# Return nothing
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
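# Each supported output format should come back with its matching Content-Type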
tests = (("pcm", "audio/pcm"), ("mp3", "audio/mpeg"), ("ogg_vorbis", "audio/ogg"))
|
||||
for output_format, content_type in tests:
|
||||
resp = client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat=output_format,
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
resp["ContentType"].should.equal(content_type)
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_lexicon():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test2"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("LexiconNotFoundException")
|
||||
else:
|
||||
raise RuntimeError("Should of raised LexiconNotFoundException")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_output_format():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="invalid",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_sample_rate():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="18000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("InvalidSampleRateException")
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_text_type():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="invalid",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_voice_id():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
VoiceId="Luke",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_text_too_long():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234" * 376, # = 3008 characters
|
||||
TextType="text",
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal("TextLengthExceededException")
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_speech_marks1():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="text",
|
||||
SpeechMarkTypes=["word"],
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal(
|
||||
"MarksNotSupportedForFormatException"
|
||||
)
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
||||
|
||||
@mock_polly
|
||||
def test_synthesize_speech_bad_speech_marks2():
|
||||
client = boto3.client("polly", region_name=DEFAULT_REGION)
|
||||
client.put_lexicon(Name="test", Content=LEXICON_XML)
|
||||
|
||||
try:
|
||||
client.synthesize_speech(
|
||||
LexiconNames=["test"],
|
||||
OutputFormat="pcm",
|
||||
SampleRate="16000",
|
||||
Text="test1234",
|
||||
TextType="ssml",
|
||||
SpeechMarkTypes=["word"],
|
||||
VoiceId="Astrid",
|
||||
)
|
||||
except ClientError as err:
|
||||
err.response["Error"]["Code"].should.equal(
|
||||
"MarksNotSupportedForFormatException"
|
||||
)
|
||||
else:
|
||||
raise RuntimeError("Should of raised ")
|
||||
|
@ -1,107 +1,107 @@
|
||||
from collections import namedtuple
|
||||
import sure # noqa
|
||||
|
||||
from moto.swf.exceptions import SWFUnknownResourceFault
|
||||
from moto.swf.models import Domain
|
||||
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
|
||||
# Fake WorkflowExecution for test purposes
|
||||
WorkflowExecution = namedtuple(
|
||||
"WorkflowExecution", ["workflow_id", "run_id", "execution_status", "open"]
|
||||
)
|
||||
|
||||
|
||||
def test_domain_short_dict_representation():
|
||||
domain = Domain("foo", "52")
|
||||
domain.to_short_dict().should.equal({"name": "foo", "status": "REGISTERED"})
|
||||
|
||||
domain.description = "foo bar"
|
||||
domain.to_short_dict()["description"].should.equal("foo bar")
|
||||
|
||||
|
||||
def test_domain_full_dict_representation():
|
||||
domain = Domain("foo", "52")
|
||||
|
||||
domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict())
|
||||
_config = domain.to_full_dict()["configuration"]
|
||||
_config["workflowExecutionRetentionPeriodInDays"].should.equal("52")
|
||||
|
||||
|
||||
def test_domain_string_representation():
|
||||
domain = Domain("my-domain", "60")
|
||||
str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)")
|
||||
|
||||
|
||||
def test_domain_add_to_activity_task_list():
|
||||
domain = Domain("my-domain", "60")
|
||||
domain.add_to_activity_task_list("foo", "bar")
|
||||
domain.activity_task_lists.should.equal({"foo": ["bar"]})
|
||||
|
||||
|
||||
def test_domain_activity_tasks():
|
||||
domain = Domain("my-domain", "60")
|
||||
domain.add_to_activity_task_list("foo", "bar")
|
||||
domain.add_to_activity_task_list("other", "baz")
|
||||
sorted(domain.activity_tasks).should.equal(["bar", "baz"])
|
||||
|
||||
|
||||
def test_domain_add_to_decision_task_list():
|
||||
domain = Domain("my-domain", "60")
|
||||
domain.add_to_decision_task_list("foo", "bar")
|
||||
domain.decision_task_lists.should.equal({"foo": ["bar"]})
|
||||
|
||||
|
||||
def test_domain_decision_tasks():
|
||||
domain = Domain("my-domain", "60")
|
||||
domain.add_to_decision_task_list("foo", "bar")
|
||||
domain.add_to_decision_task_list("other", "baz")
|
||||
sorted(domain.decision_tasks).should.equal(["bar", "baz"])
|
||||
|
||||
|
||||
def test_domain_get_workflow_execution():
|
||||
domain = Domain("my-domain", "60")
|
||||
|
||||
wfe1 = WorkflowExecution(
|
||||
workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True
|
||||
)
|
||||
wfe2 = WorkflowExecution(
|
||||
workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False
|
||||
)
|
||||
wfe3 = WorkflowExecution(
|
||||
workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True
|
||||
)
|
||||
wfe4 = WorkflowExecution(
|
||||
workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False
|
||||
)
|
||||
domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]
|
||||
|
||||
# get workflow execution through workflow_id and run_id
|
||||
domain.get_workflow_execution("wf-id-1", run_id="run-id-1").should.equal(wfe1)
|
||||
domain.get_workflow_execution("wf-id-1", run_id="run-id-2").should.equal(wfe2)
|
||||
domain.get_workflow_execution("wf-id-3", run_id="run-id-4").should.equal(wfe4)
|
||||
|
||||
domain.get_workflow_execution.when.called_with(
|
||||
"wf-id-1", run_id="non-existent"
|
||||
).should.throw(SWFUnknownResourceFault)
|
||||
|
||||
# get OPEN workflow execution by default if no run_id
|
||||
domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
|
||||
domain.get_workflow_execution.when.called_with("wf-id-3").should.throw(
|
||||
SWFUnknownResourceFault
|
||||
)
|
||||
domain.get_workflow_execution.when.called_with("wf-id-non-existent").should.throw(
|
||||
SWFUnknownResourceFault
|
||||
)
|
||||
|
||||
# raise_if_closed keyword argument
|
||||
domain.get_workflow_execution(
|
||||
"wf-id-1", run_id="run-id-1", raise_if_closed=True
|
||||
).should.equal(wfe1)
|
||||
domain.get_workflow_execution.when.called_with(
|
||||
"wf-id-3", run_id="run-id-4", raise_if_closed=True
|
||||
).should.throw(SWFUnknownResourceFault)
|
||||
|
||||
# raise_if_none keyword argument
|
||||
domain.get_workflow_execution("foo", raise_if_none=False).should.be.none
|
||||
|
@ -1,19 +1,19 @@
|
||||
from freezegun import freeze_time
|
||||
import sure # noqa
|
||||
|
||||
from moto.swf.models import Timeout
|
||||
|
||||
from ..utils import make_workflow_execution
|
||||
|
||||
|
||||
def test_timeout_creation():
|
||||
wfe = make_workflow_execution()
|
||||
|
||||
# epoch 1420113600 == "2015-01-01 13:00:00"
|
||||
timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE")
|
||||
|
||||
with freeze_time("2015-01-01 12:00:00"):
|
||||
timeout.reached.should.be.falsy
|
||||
|
||||
with freeze_time("2015-01-01 13:00:00"):
|
||||
timeout.reached.should.be.truthy
|
||||
|
@ -1,114 +1,114 @@
|
||||
import boto
|
||||
from boto.swf.exceptions import SWFResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_swf_deprecated
|
||||
|
||||
|
||||
# RegisterDomain endpoint
|
||||
@mock_swf_deprecated
|
||||
def test_register_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("test-domain", "60", description="A test domain")
|
||||
|
||||
all_domains = conn.list_domains("REGISTERED")
|
||||
domain = all_domains["domainInfos"][0]
|
||||
|
||||
domain["name"].should.equal("test-domain")
|
||||
domain["status"].should.equal("REGISTERED")
|
||||
domain["description"].should.equal("A test domain")
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_register_already_existing_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("test-domain", "60", description="A test domain")
|
||||
|
||||
conn.register_domain.when.called_with(
|
||||
"test-domain", "60", description="A test domain"
|
||||
).should.throw(SWFResponseError)
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_register_with_wrong_parameter_type():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
|
||||
conn.register_domain.when.called_with(
|
||||
"test-domain", 60, description="A test domain"
|
||||
).should.throw(SWFResponseError)
|
||||
|
||||
|
||||
# ListDomains endpoint
|
||||
@mock_swf_deprecated
|
||||
def test_list_domains_order():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("b-test-domain", "60")
|
||||
conn.register_domain("a-test-domain", "60")
|
||||
conn.register_domain("c-test-domain", "60")
|
||||
|
||||
all_domains = conn.list_domains("REGISTERED")
|
||||
names = [domain["name"] for domain in all_domains["domainInfos"]]
|
||||
names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_list_domains_reverse_order():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("b-test-domain", "60")
|
||||
conn.register_domain("a-test-domain", "60")
|
||||
conn.register_domain("c-test-domain", "60")
|
||||
|
||||
all_domains = conn.list_domains("REGISTERED", reverse_order=True)
|
||||
names = [domain["name"] for domain in all_domains["domainInfos"]]
|
||||
names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])
|
||||
|
||||
|
||||
# DeprecateDomain endpoint
|
||||
@mock_swf_deprecated
|
||||
def test_deprecate_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("test-domain", "60", description="A test domain")
|
||||
conn.deprecate_domain("test-domain")
|
||||
|
||||
all_domains = conn.list_domains("DEPRECATED")
|
||||
domain = all_domains["domainInfos"][0]
|
||||
|
||||
domain["name"].should.equal("test-domain")
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_deprecate_already_deprecated_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("test-domain", "60", description="A test domain")
|
||||
conn.deprecate_domain("test-domain")
|
||||
|
||||
conn.deprecate_domain.when.called_with("test-domain").should.throw(SWFResponseError)
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_deprecate_non_existent_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
|
||||
conn.deprecate_domain.when.called_with("non-existent").should.throw(
|
||||
SWFResponseError
|
||||
)
|
||||
|
||||
|
||||
# DescribeDomain endpoint
|
||||
@mock_swf_deprecated
|
||||
def test_describe_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
conn.register_domain("test-domain", "60", description="A test domain")
|
||||
|
||||
domain = conn.describe_domain("test-domain")
|
||||
domain["configuration"]["workflowExecutionRetentionPeriodInDays"].should.equal("60")
|
||||
domain["domainInfo"]["description"].should.equal("A test domain")
|
||||
domain["domainInfo"]["name"].should.equal("test-domain")
|
||||
domain["domainInfo"]["status"].should.equal("REGISTERED")
|
||||
|
||||
|
||||
@mock_swf_deprecated
|
||||
def test_describe_non_existent_domain():
|
||||
conn = boto.connect_swf("the_key", "the_secret")
|
||||
|
||||
conn.describe_domain.when.called_with("non-existent").should.throw(SWFResponseError)
|
||||
|
@ -1,9 +1,9 @@
|
||||
import sure # noqa
|
||||
|
||||
from moto.swf.utils import decapitalize
|
||||
|
||||
|
||||
def test_decapitalize():
|
||||
cases = {"fooBar": "fooBar", "FooBar": "fooBar", "FOO BAR": "fOO BAR"}
|
||||
for before, after in cases.items():
|
||||
decapitalize(before).should.equal(after)
|
||||
|