Added AWS DataSync mocks and tests

This commit is contained in:
Bjorn Olsen 2019-11-02 21:34:35 +02:00
parent c02c0e4033
commit 97c20dd11d
10 changed files with 539 additions and 203 deletions

View File

@@ -30,7 +30,7 @@ Currently implemented Services:
+-----------------------+---------------------+-----------------------------------+ +-----------------------+---------------------+-----------------------------------+
| Data Pipeline | @mock_datapipeline | basic endpoints done | | Data Pipeline | @mock_datapipeline | basic endpoints done |
+-----------------------+---------------------+-----------------------------------+ +-----------------------+---------------------+-----------------------------------+
| DataSync | @mock_datasync | basic endpoints done | | DataSync | @mock_datasync | some endpoints done |
+-----------------------+---------------------+-----------------------------------+ +-----------------------+---------------------+-----------------------------------+
| - DynamoDB | - @mock_dynamodb | - core endpoints done | | - DynamoDB | - @mock_dynamodb | - core endpoints done |
| - DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes| | - DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes|

View File

@@ -1,9 +0,0 @@
AWSTemplateFormatVersion: '2010-09-09'
Description: Simple CloudFormation Test Template
Resources:
S3Bucket:
Type: AWS::S3::Bucket
Properties:
AccessControl: PublicRead
BucketName: cf-test-bucket-1

View File

@@ -7,14 +7,14 @@ from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # noqa from .awslambda import mock_lambda, mock_lambda_deprecated # noqa
from .batch import mock_batch # noqa from .batch import mock_batch # noqa
from .cloudformation import mock_cloudformation # noqa from .cloudformation import mock_cloudformation # noqa
from .cloudformation import mock_cloudformation_deprecated from .cloudformation import mock_cloudformation_deprecated # noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # noqa from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # noqa
from .cognitoidentity import mock_cognitoidentity # noqa from .cognitoidentity import mock_cognitoidentity # noqa
from .cognitoidentity import mock_cognitoidentity_deprecated from .cognitoidentity import mock_cognitoidentity_deprecated # noqa
from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # noqa from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # noqa
from .config import mock_config # noqa from .config import mock_config # noqa
from .datapipeline import mock_datapipeline # noqa from .datapipeline import mock_datapipeline # noqa
from .datapipeline import mock_datapipeline_deprecated from .datapipeline import mock_datapipeline_deprecated # noqa
from .datasync import mock_datasync # noqa from .datasync import mock_datasync # noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa
@@ -61,7 +61,6 @@ __title__ = "moto"
__version__ = "1.3.14.dev" __version__ = "1.3.14.dev"
try: try:
# Need to monkey-patch botocore requests back to underlying urllib3 classes # Need to monkey-patch botocore requests back to underlying urllib3 classes
from botocore.awsrequest import ( from botocore.awsrequest import (

View File

@@ -0,0 +1,15 @@
from __future__ import unicode_literals
from moto.core.exceptions import JsonRESTError
class DataSyncClientError(JsonRESTError):
code = 400
class InvalidRequestException(DataSyncClientError):
def __init__(self, msg=None):
self.code = 400
super(InvalidRequestException, self).__init__(
"InvalidRequestException", msg or "The request is not valid."
)

View File

@@ -1,45 +1,97 @@
import json
import logging
import random
import string
import boto3 import boto3
from moto.compat import OrderedDict from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel from moto.core import BaseBackend, BaseModel
from .exceptions import InvalidRequestException
class Location(BaseModel): class Location(BaseModel):
def __init__(
def __init__(self, self, location_uri, region_name=None, typ=None, metadata=None, arn_counter=0
location_uri, ):
region_name,
arn_counter=0):
self.uri = location_uri self.uri = location_uri
self.region_name = region_name self.region_name = region_name
self.metadata = metadata
self.typ = typ
# Generate ARN # Generate ARN
self.arn = 'arn:aws:datasync:{0}:111222333444:location/loc-{1}'.format(region_name, str(arn_counter).zfill(17)) self.arn = "arn:aws:datasync:{0}:111222333444:location/loc-{1}".format(
region_name, str(arn_counter).zfill(17)
)
class Task(BaseModel): class Task(BaseModel):
def __init__(self, def __init__(
source_location_arn, self,
destination_location_arn, source_location_arn,
name, destination_location_arn,
region_name, name,
arn_counter=0): region_name,
arn_counter=0,
):
self.source_location_arn = source_location_arn self.source_location_arn = source_location_arn
self.destination_location_arn = destination_location_arn self.destination_location_arn = destination_location_arn
self.status = 'AVAILABLE' # For simplicity Tasks are either available or running
self.status = "AVAILABLE"
self.name = name self.name = name
self.current_task_execution_arn = None
# Generate ARN # Generate ARN
self.arn = 'arn:aws:datasync:{0}:111222333444:task/task-{1}'.format(region_name, str(arn_counter).zfill(17)) self.arn = "arn:aws:datasync:{0}:111222333444:task/task-{1}".format(
region_name, str(arn_counter).zfill(17)
)
class TaskExecution(BaseModel): class TaskExecution(BaseModel):
def __init__(self,
task_arn, # For simplicity, task_execution can never fail
arn_counter=0): # Some documentation refers to this list:
# 'Status': 'QUEUED'|'LAUNCHING'|'PREPARING'|'TRANSFERRING'|'VERIFYING'|'SUCCESS'|'ERROR'
# Others refers to this list:
# INITIALIZING | PREPARING | TRANSFERRING | VERIFYING | SUCCESS/FAILURE
# Checking with AWS Support...
TASK_EXECUTION_INTERMEDIATE_STATES = (
"INITIALIZING",
# 'QUEUED', 'LAUNCHING',
"PREPARING",
"TRANSFERRING",
"VERIFYING",
)
TASK_EXECUTION_FAILURE_STATES = ("ERROR",)
TASK_EXECUTION_SUCCESS_STATES = ("SUCCESS",)
# Also COMPLETED state?
def __init__(self, task_arn, arn_counter=0):
self.task_arn = task_arn self.task_arn = task_arn
self.arn = '{0}/execution/exec-{1}'.format(task_arn, str(arn_counter).zfill(17)) self.arn = "{0}/execution/exec-{1}".format(task_arn, str(arn_counter).zfill(17))
self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[0]
# Simulate a task execution
def iterate_status(self):
if self.status in self.TASK_EXECUTION_FAILURE_STATES:
return
if self.status in self.TASK_EXECUTION_SUCCESS_STATES:
return
if self.status in self.TASK_EXECUTION_INTERMEDIATE_STATES:
for i, status in enumerate(self.TASK_EXECUTION_INTERMEDIATE_STATES):
if status == self.status:
if i < len(self.TASK_EXECUTION_INTERMEDIATE_STATES) - 1:
self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[i + 1]
else:
self.status = self.TASK_EXECUTION_SUCCESS_STATES[0]
return
raise Exception(
"TaskExecution.iterate_status: Unknown status={0}".format(self.status)
)
def cancel(self):
if self.status not in self.TASK_EXECUTION_INTERMEDIATE_STATES:
raise InvalidRequestException(
"Sync task cannot be cancelled in its current status: {0}".format(
self.status
)
)
self.status = "ERROR"
class DataSyncBackend(BaseBackend): class DataSyncBackend(BaseBackend):
def __init__(self, region_name): def __init__(self, region_name):
@@ -47,9 +99,9 @@ class DataSyncBackend(BaseBackend):
# Always increase when new things are created # Always increase when new things are created
# This ensures uniqueness # This ensures uniqueness
self.arn_counter = 0 self.arn_counter = 0
self.locations = dict() self.locations = OrderedDict()
self.tasks = dict() self.tasks = OrderedDict()
self.task_executions = dict() self.task_executions = OrderedDict()
def reset(self): def reset(self):
region_name = self.region_name region_name = self.region_name
@@ -57,39 +109,69 @@ class DataSyncBackend(BaseBackend):
self.__dict__ = {} self.__dict__ = {}
self.__init__(region_name) self.__init__(region_name)
def create_location(self, location_uri): def create_location(self, location_uri, typ=None, metadata=None):
# TODO BJORN figure out exception """
# TODO BJORN test for exception # AWS DataSync allows for duplicate LocationUris
for arn, location in self.locations.items(): for arn, location in self.locations.items():
if location.uri == location_uri: if location.uri == location_uri:
raise Exception('Location already exists') raise Exception('Location already exists')
"""
if not typ:
raise Exception("Location type must be specified")
self.arn_counter = self.arn_counter + 1 self.arn_counter = self.arn_counter + 1
location = Location(location_uri, location = Location(
region_name=self.region_name, location_uri,
arn_counter=self.arn_counter) region_name=self.region_name,
arn_counter=self.arn_counter,
metadata=metadata,
typ=typ,
)
self.locations[location.arn] = location self.locations[location.arn] = location
return location.arn return location.arn
def create_task(self, def create_task(self, source_location_arn, destination_location_arn, name):
source_location_arn, if source_location_arn not in self.locations:
destination_location_arn, raise InvalidRequestException(
name): "Location {0} not found.".format(source_location_arn)
)
if destination_location_arn not in self.locations:
raise InvalidRequestException(
"Location {0} not found.".format(destination_location_arn)
)
self.arn_counter = self.arn_counter + 1 self.arn_counter = self.arn_counter + 1
task = Task(source_location_arn, task = Task(
destination_location_arn, source_location_arn,
name, destination_location_arn,
region_name=self.region_name, name,
arn_counter=self.arn_counter region_name=self.region_name,
) arn_counter=self.arn_counter,
)
self.tasks[task.arn] = task self.tasks[task.arn] = task
return task.arn return task.arn
def start_task_execution(self, task_arn): def start_task_execution(self, task_arn):
self.arn_counter = self.arn_counter + 1 self.arn_counter = self.arn_counter + 1
task_execution = TaskExecution(task_arn, if task_arn in self.tasks:
arn_counter=self.arn_counter) task = self.tasks[task_arn]
self.task_executions[task_execution.arn] = task_execution if task.status == "AVAILABLE":
return task_execution.arn task_execution = TaskExecution(task_arn, arn_counter=self.arn_counter)
self.task_executions[task_execution.arn] = task_execution
self.tasks[task_arn].current_task_execution_arn = task_execution.arn
self.tasks[task_arn].status = "RUNNING"
return task_execution.arn
raise InvalidRequestException("Invalid request.")
def cancel_task_execution(self, task_execution_arn):
if task_execution_arn in self.task_executions:
task_execution = self.task_executions[task_execution_arn]
task_execution.cancel()
task_arn = task_execution.task_arn
self.tasks[task_arn].current_task_execution_arn = None
return
raise InvalidRequestException(
"Sync task {0} is not found.".format(task_execution_arn)
)
datasync_backends = {} datasync_backends = {}
for region in boto3.Session().get_available_regions("datasync"): for region in boto3.Session().get_available_regions("datasync"):

View File

@@ -1,18 +1,12 @@
import json import json
import logging
import re
from moto.core.responses import BaseResponse from moto.core.responses import BaseResponse
from six.moves.urllib.parse import urlparse
from .exceptions import InvalidRequestException
from .models import datasync_backends from .models import datasync_backends
class DataSyncResponse(BaseResponse): class DataSyncResponse(BaseResponse):
# TODO BJORN check datasync rege
region_regex = re.compile(r"://(.+?)\.datasync\.amazonaws\.com")
@property @property
def datasync_backend(self): def datasync_backend(self):
return datasync_backends[self.region] return datasync_backends[self.region]
@@ -20,37 +14,77 @@ class DataSyncResponse(BaseResponse):
def list_locations(self): def list_locations(self):
locations = list() locations = list()
for arn, location in self.datasync_backend.locations.items(): for arn, location in self.datasync_backend.locations.items():
locations.append( { locations.append({"LocationArn": location.arn, "LocationUri": location.uri})
'LocationArn': location.arn,
'LocationUri': location.uri
})
return json.dumps({"Locations": locations}) return json.dumps({"Locations": locations})
def _get_location(self, location_arn, typ):
location_arn = self._get_param("LocationArn")
if location_arn not in self.datasync_backend.locations:
raise InvalidRequestException(
"Location {0} is not found.".format(location_arn)
)
location = self.datasync_backend.locations[location_arn]
if location.typ != typ:
raise InvalidRequestException(
"Invalid Location type: {0}".format(location.typ)
)
return location
def create_location_s3(self): def create_location_s3(self):
# s3://bucket_name/folder/ # s3://bucket_name/folder/
s3_bucket_arn = self._get_param("S3BucketArn") s3_bucket_arn = self._get_param("S3BucketArn")
subdirectory = self._get_param("Subdirectory") subdirectory = self._get_param("Subdirectory")
metadata = {"S3Config": self._get_param("S3Config")}
location_uri_elts = ['s3:/', s3_bucket_arn.split(':')[-1]] location_uri_elts = ["s3:/", s3_bucket_arn.split(":")[-1]]
if subdirectory: if subdirectory:
location_uri_elts.append(subdirectory) location_uri_elts.append(subdirectory)
location_uri='/'.join(location_uri_elts) location_uri = "/".join(location_uri_elts)
arn = self.datasync_backend.create_location(location_uri) arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="S3"
return json.dumps({'LocationArn':arn}) )
return json.dumps({"LocationArn": arn})
def describe_location_s3(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="S3")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"S3Config": location.metadata["S3Config"],
}
)
def create_location_smb(self): def create_location_smb(self):
# smb://smb.share.fqdn/AWS_Test/ # smb://smb.share.fqdn/AWS_Test/
subdirectory = self._get_param("Subdirectory") subdirectory = self._get_param("Subdirectory")
server_hostname = self._get_param("ServerHostname") server_hostname = self._get_param("ServerHostname")
metadata = {
"AgentArns": self._get_param("AgentArns"),
"User": self._get_param("User"),
"Domain": self._get_param("Domain"),
"MountOptions": self._get_param("MountOptions"),
}
location_uri = '/'.join(['smb:/', server_hostname, subdirectory]) location_uri = "/".join(["smb:/", server_hostname, subdirectory])
arn = self.datasync_backend.create_location(location_uri) arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="SMB"
return json.dumps({'LocationArn':arn}) )
return json.dumps({"LocationArn": arn})
def describe_location_smb(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="SMB")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"AgentArns": location.metadata["AgentArns"],
"User": location.metadata["User"],
"Domain": location.metadata["Domain"],
"MountOptions": location.metadata["MountOptions"],
}
)
def create_task(self): def create_task(self):
destination_location_arn = self._get_param("DestinationLocationArn") destination_location_arn = self._get_param("DestinationLocationArn")
@@ -58,45 +92,64 @@ class DataSyncResponse(BaseResponse):
name = self._get_param("Name") name = self._get_param("Name")
arn = self.datasync_backend.create_task( arn = self.datasync_backend.create_task(
source_location_arn, source_location_arn, destination_location_arn, name
destination_location_arn,
name
) )
return json.dumps({"TaskArn": arn})
return json.dumps({'TaskArn':arn})
def list_tasks(self): def list_tasks(self):
tasks = list() tasks = list()
for arn, task in self.datasync_backend.tasks.items(): for arn, task in self.datasync_backend.tasks.items():
tasks.append( { tasks.append(
'Name': task.name, {"Name": task.name, "Status": task.status, "TaskArn": task.arn}
'Status': task.status, )
'TaskArn': task.arn
})
return json.dumps({"Tasks": tasks}) return json.dumps({"Tasks": tasks})
def describe_task(self): def describe_task(self):
task_arn = self._get_param("TaskArn") task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks: if task_arn in self.datasync_backend.tasks:
task = self.datasync_backend.tasks[task_arn] task = self.datasync_backend.tasks[task_arn]
return json.dumps({ return json.dumps(
'TaskArn': task.arn, {
'Name': task.name, "TaskArn": task.arn,
'Status': task.status, "Name": task.name,
'SourceLocationArn': task.source_location_arn, "CurrentTaskExecutionArn": task.current_task_execution_arn,
'DestinationLocationArn': task.destination_location_arn "Status": task.status,
}) "SourceLocationArn": task.source_location_arn,
# TODO BJORN exception if task_arn not found? "DestinationLocationArn": task.destination_location_arn,
return None }
)
raise InvalidRequestException
def start_task_execution(self): def start_task_execution(self):
task_arn = self._get_param("TaskArn") task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks: if task_arn in self.datasync_backend.tasks:
arn = self.datasync_backend.start_task_execution( arn = self.datasync_backend.start_task_execution(task_arn)
task_arn if arn:
) return json.dumps({"TaskExecutionArn": arn})
return json.dumps({'TaskExecutionArn':arn}) raise InvalidRequestException("Invalid request.")
# TODO BJORN exception if task_arn not found? def cancel_task_execution(self):
return None task_execution_arn = self._get_param("TaskExecutionArn")
self.datasync_backend.cancel_task_execution(task_execution_arn)
return json.dumps({})
def describe_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
if task_execution_arn in self.datasync_backend.task_executions:
task_execution = self.datasync_backend.task_executions[task_execution_arn]
if task_execution:
result = json.dumps(
{
"TaskExecutionArn": task_execution.arn,
"Status": task_execution.status,
}
)
if task_execution.status == "SUCCESS":
self.datasync_backend.tasks[
task_execution.task_arn
].status = "AVAILABLE"
# Simulate task being executed
task_execution.iterate_status()
return result
raise InvalidRequestException

View File

@@ -6,9 +6,13 @@ from moto.core.responses import BaseResponse
from moto.core.utils import amz_crc32, amzn_request_id from moto.core.utils import amz_crc32, amzn_request_id
from six.moves.urllib.parse import urlparse from six.moves.urllib.parse import urlparse
from .exceptions import (EmptyBatchRequest, InvalidAttributeName, from .exceptions import (
MessageAttributesInvalid, MessageNotInflight, EmptyBatchRequest,
ReceiptHandleIsInvalid) InvalidAttributeName,
MessageAttributesInvalid,
MessageNotInflight,
ReceiptHandleIsInvalid,
)
from .models import sqs_backends from .models import sqs_backends
from .utils import parse_message_attributes from .utils import parse_message_attributes

View File

@@ -1,6 +1,7 @@
[nosetests] [nosetests]
verbosity=1 verbosity=1
detailed-errors=1 detailed-errors=1
with-coverage=1
cover-package=moto cover-package=moto
[bdist_wheel] [bdist_wheel]

View File

@@ -2,134 +2,326 @@ import logging
import boto import boto
import boto3 import boto3
from botocore.exceptions import ClientError
from moto import mock_datasync from moto import mock_datasync
from nose.tools import assert_raises
''' def create_locations(client, create_smb=False, create_s3=False):
Endpoints I need to test: """
start_task_execution Convenience function for creating locations.
cancel_task_execution Locations must exist before tasks can be created.
describe_task_execution """
''' smb_arn = None
s3_arn = None
if create_smb:
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User="",
Password="",
AgentArns=["stuff"],
)
smb_arn = response["LocationArn"]
if create_s3:
response = client.create_location_s3(
S3BucketArn="arn:aws:s3:::my_bucket",
Subdirectory="dir",
S3Config={"BucketAccessRoleArn": "role"},
)
s3_arn = response["LocationArn"]
return {"smb_arn": smb_arn, "s3_arn": s3_arn}
@mock_datasync @mock_datasync
def test_create_location_smb(): def test_create_location_smb():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
response = client.create_location_smb(ServerHostname='host', response = client.create_location_smb(
Subdirectory='somewhere', ServerHostname="host",
User='', Subdirectory="somewhere",
Password='', User="",
AgentArns=['stuff']) Password="",
assert 'LocationArn' in response AgentArns=["stuff"],
)
assert "LocationArn" in response
@mock_datasync
def test_describe_location_smb():
client = boto3.client("datasync", region_name="us-east-1")
agent_arns = ["stuff"]
user = "user"
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User=user,
Password="",
AgentArns=agent_arns,
)
response = client.describe_location_smb(LocationArn=response["LocationArn"])
assert "LocationArn" in response
assert "LocationUri" in response
assert response["User"] == user
assert response["AgentArns"] == agent_arns
@mock_datasync @mock_datasync
def test_create_location_s3(): def test_create_location_s3():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
response = client.create_location_s3(S3BucketArn='arn:aws:s3:::my_bucket', response = client.create_location_s3(
Subdirectory='dir', S3BucketArn="arn:aws:s3:::my_bucket",
S3Config={'BucketAccessRoleArn':'role'}) Subdirectory="dir",
assert 'LocationArn' in response S3Config={"BucketAccessRoleArn": "role"},
)
assert "LocationArn" in response
@mock_datasync
def test_describe_location_s3():
client = boto3.client("datasync", region_name="us-east-1")
s3_config = {"BucketAccessRoleArn": "role"}
response = client.create_location_s3(
S3BucketArn="arn:aws:s3:::my_bucket", Subdirectory="dir", S3Config=s3_config
)
response = client.describe_location_s3(LocationArn=response["LocationArn"])
assert "LocationArn" in response
assert "LocationUri" in response
assert response["S3Config"] == s3_config
@mock_datasync
def test_describe_location_wrong():
client = boto3.client("datasync", region_name="us-east-1")
agent_arns = ["stuff"]
user = "user"
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User=user,
Password="",
AgentArns=agent_arns,
)
with assert_raises(ClientError) as e:
response = client.describe_location_s3(LocationArn=response["LocationArn"])
@mock_datasync @mock_datasync
def test_list_locations(): def test_list_locations():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
response = client.list_locations() response = client.list_locations()
# TODO BJORN check if Locations exists when there are none assert len(response["Locations"]) == 0
assert len(response['Locations']) == 0
response = client.create_location_smb(ServerHostname='host', create_locations(client, create_smb=True)
Subdirectory='somewhere',
User='',
Password='',
AgentArns=['stuff'])
response = client.list_locations() response = client.list_locations()
assert len(response['Locations']) == 1 assert len(response["Locations"]) == 1
assert response['Locations'][0]['LocationUri'] == 'smb://host/somewhere' assert response["Locations"][0]["LocationUri"] == "smb://host/somewhere"
response = client.create_location_s3(S3BucketArn='arn:aws:s3:::my_bucket',
S3Config={'BucketAccessRoleArn':'role'})
create_locations(client, create_s3=True)
response = client.list_locations() response = client.list_locations()
assert len(response['Locations']) == 2 assert len(response["Locations"]) == 2
assert response['Locations'][1]['LocationUri'] == 's3://my_bucket' assert response["Locations"][1]["LocationUri"] == "s3://my_bucket/dir"
response = client.create_location_s3(S3BucketArn='arn:aws:s3:::my_bucket',
Subdirectory='subdir',
S3Config={'BucketAccessRoleArn':'role'})
create_locations(client, create_s3=True)
response = client.list_locations() response = client.list_locations()
assert len(response['Locations']) == 3 assert len(response["Locations"]) == 3
assert response['Locations'][2]['LocationUri'] == 's3://my_bucket/subdir' assert response["Locations"][2]["LocationUri"] == "s3://my_bucket/dir"
@mock_datasync @mock_datasync
def test_create_task(): def test_create_task():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
# TODO BJORN check if task can be created when there are no locations locations = create_locations(client, create_smb=True, create_s3=True)
response = client.create_task( response = client.create_task(
SourceLocationArn='1', SourceLocationArn=locations["smb_arn"],
DestinationLocationArn='2' DestinationLocationArn=locations["s3_arn"],
) )
assert 'TaskArn' in response assert "TaskArn" in response
@mock_datasync
def test_create_task_fail():
""" Test that Locations must exist before a Task can be created """
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_smb=True, create_s3=True)
with assert_raises(ClientError) as e:
response = client.create_task(
SourceLocationArn="1", DestinationLocationArn=locations["s3_arn"]
)
with assert_raises(ClientError) as e:
response = client.create_task(
SourceLocationArn=locations["smb_arn"], DestinationLocationArn="2"
)
@mock_datasync @mock_datasync
def test_list_tasks(): def test_list_tasks():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task( response = client.create_task(
SourceLocationArn='1', SourceLocationArn=locations["smb_arn"],
DestinationLocationArn='2', DestinationLocationArn=locations["s3_arn"],
) )
response = client.create_task( response = client.create_task(
SourceLocationArn='3', SourceLocationArn=locations["s3_arn"],
DestinationLocationArn='4', DestinationLocationArn=locations["smb_arn"],
Name='task_name' Name="task_name",
) )
response = client.list_tasks() response = client.list_tasks()
tasks = response['Tasks'] tasks = response["Tasks"]
assert len(tasks) == 2 assert len(tasks) == 2
task = tasks[0] task = tasks[0]
assert task['Status'] == 'AVAILABLE' assert task["Status"] == "AVAILABLE"
assert 'Name' not in task assert "Name" not in task
task = tasks[1] task = tasks[1]
assert task['Status'] == 'AVAILABLE' assert task["Status"] == "AVAILABLE"
assert task['Name'] == 'task_name' assert task["Name"] == "task_name"
@mock_datasync @mock_datasync
def test_describe_task(): def test_describe_task():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task( response = client.create_task(
SourceLocationArn='3', SourceLocationArn=locations["smb_arn"],
DestinationLocationArn='4', DestinationLocationArn=locations["s3_arn"],
Name='task_name' Name="task_name",
)
task_arn = response['TaskArn']
response = client.describe_task(
TaskArn=task_arn
) )
task_arn = response["TaskArn"]
response = client.describe_task(TaskArn=task_arn)
assert "TaskArn" in response
assert "Status" in response
assert "SourceLocationArn" in response
assert "DestinationLocationArn" in response
@mock_datasync
def test_describe_task_not_exist():
client = boto3.client("datasync", region_name="us-east-1")
with assert_raises(ClientError) as e:
client.describe_task(TaskArn="abc")
assert 'TaskArn' in response
assert 'Status' in response
assert 'SourceLocationArn' in response
assert 'DestinationLocationArn' in response
@mock_datasync @mock_datasync
def test_start_task_execution(): def test_start_task_execution():
client = boto3.client("datasync", region_name="us-east-1") client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task( response = client.create_task(
SourceLocationArn='3', SourceLocationArn=locations["smb_arn"],
DestinationLocationArn='4', DestinationLocationArn=locations["s3_arn"],
Name='task_name' Name="task_name",
)
task_arn = response['TaskArn']
response = client.start_task_execution(
TaskArn=task_arn
) )
assert 'TaskExecutionArn' in response task_arn = response["TaskArn"]
response = client.describe_task(TaskArn=task_arn)
assert "CurrentTaskExecutionArn" not in response
response = client.start_task_execution(TaskArn=task_arn)
assert "TaskExecutionArn" in response
task_execution_arn = response["TaskExecutionArn"]
response = client.describe_task(TaskArn=task_arn)
assert response["CurrentTaskExecutionArn"] == task_execution_arn
@mock_datasync
def test_start_task_execution_twice():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
assert "TaskExecutionArn" in response
task_execution_arn = response["TaskExecutionArn"]
with assert_raises(ClientError) as e:
response = client.start_task_execution(TaskArn=task_arn)
@mock_datasync
def test_describe_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
task_execution_arn = response["TaskExecutionArn"]
# Each time task_execution is described the Status will increment
# This is a simple way to simulate a task being executed
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "INITIALIZING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "PREPARING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "TRANSFERRING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "VERIFYING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
@mock_datasync
def test_describe_task_execution_not_exist():
client = boto3.client("datasync", region_name="us-east-1")
with assert_raises(ClientError) as e:
client.describe_task_execution(TaskExecutionArn="abc")
@mock_datasync
def test_cancel_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
task_execution_arn = response["TaskExecutionArn"]
response = client.describe_task(TaskArn=task_arn)
assert response["CurrentTaskExecutionArn"] == task_execution_arn
response = client.cancel_task_execution(TaskExecutionArn=task_execution_arn)
response = client.describe_task(TaskArn=task_arn)
assert "CurrentTaskExecutionArn" not in response
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["Status"] == "ERROR"

View File

@@ -1,27 +1,26 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import unicode_literals from __future__ import unicode_literals
import base64
import json
import os import os
import time
import uuid
import boto import boto
import boto3 import boto3
import botocore.exceptions import botocore.exceptions
import six import six
from botocore.exceptions import ClientError
from boto.exception import SQSError
from boto.sqs.message import RawMessage, Message
from freezegun import freeze_time
import base64
import json
import sure # noqa import sure # noqa
import time
import uuid
from moto import settings, mock_sqs, mock_sqs_deprecated
from tests.helpers import requires_boto_gte
import tests.backport_assert_raises # noqa import tests.backport_assert_raises # noqa
from nose.tools import assert_raises from boto.exception import SQSError
from boto.sqs.message import Message, RawMessage
from botocore.exceptions import ClientError
from freezegun import freeze_time
from moto import mock_sqs, mock_sqs_deprecated, settings
from nose import SkipTest from nose import SkipTest
from nose.tools import assert_raises
from tests.helpers import requires_boto_gte
@mock_sqs @mock_sqs
@@ -33,7 +32,7 @@ def test_create_fifo_queue_fail():
except botocore.exceptions.ClientError as err: except botocore.exceptions.ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue") err.response["Error"]["Code"].should.equal("InvalidParameterValue")
else: else:
raise RuntimeError("Should of raised InvalidParameterValue Exception")z raise RuntimeError("Should of raised InvalidParameterValue Exception")
@mock_sqs @mock_sqs