Merge pull request #2532 from baolsen/datasync

Added mocks for AWS DataSync #2526
Steve Pulec 2019-11-04 22:51:23 -06:00 committed by GitHub
commit 3dbce228ec
12 changed files with 742 additions and 45 deletions
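
For readers trying out the new mock, here is a minimal sketch of how it can be exercised, based on the tests added in this PR (the bucket and role ARNs are illustrative placeholders):

import boto3
from moto import mock_datasync

@mock_datasync
def test_create_location_s3_example():
    # Inside the decorated function, the datasync client talks to moto's
    # in-memory backend instead of AWS.
    client = boto3.client("datasync", region_name="us-east-1")
    response = client.create_location_s3(
        S3BucketArn="arn:aws:s3:::my_bucket",      # illustrative bucket ARN
        Subdirectory="dir",
        S3Config={"BucketAccessRoleArn": "role"},  # illustrative role
    )
    assert "LocationArn" in response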

View File

@@ -30,6 +30,8 @@ Currently implemented Services:
+-----------------------+---------------------+-----------------------------------+
| Data Pipeline         | @mock_datapipeline  | basic endpoints done              |
+-----------------------+---------------------+-----------------------------------+
| DataSync              | @mock_datasync      | some endpoints done               |
+-----------------------+---------------------+-----------------------------------+
| - DynamoDB            | - @mock_dynamodb    | - core endpoints done             |
| - DynamoDB2           | - @mock_dynamodb2   | - core endpoints + partial indexes|
+-----------------------+---------------------+-----------------------------------+

moto/__init__.py

@@ -1,25 +1,21 @@
from __future__ import unicode_literals
# import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = "moto"
__version__ = "1.3.14.dev"
from .acm import mock_acm # noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # noqa
from .athena import mock_athena # noqa
from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # noqa
from .batch import mock_batch # noqa
from .cloudformation import mock_cloudformation # noqa
from .cloudformation import mock_cloudformation_deprecated # noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # noqa
from .cognitoidentity import ( # noqa
mock_cognitoidentity,
mock_cognitoidentity_deprecated,
)
from .cognitoidentity import mock_cognitoidentity # noqa
from .cognitoidentity import mock_cognitoidentity_deprecated # noqa
from .cognitoidp import mock_cognitoidp, mock_cognitoidp_deprecated # noqa
from .config import mock_config # noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # noqa
from .datapipeline import mock_datapipeline # noqa
from .datapipeline import mock_datapipeline_deprecated # noqa
from .datasync import mock_datasync # noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # noqa
from .dynamodbstreams import mock_dynamodbstreams # noqa
@@ -33,31 +29,36 @@ from .events import mock_events # noqa
from .glacier import mock_glacier, mock_glacier_deprecated # noqa
from .glue import mock_glue # noqa
from .iam import mock_iam, mock_iam_deprecated # noqa
from .iot import mock_iot # noqa
from .iotdata import mock_iotdata # noqa
from .kinesis import mock_kinesis, mock_kinesis_deprecated # noqa
from .kms import mock_kms, mock_kms_deprecated # noqa
from .organizations import mock_organizations # noqa
from .logs import mock_logs, mock_logs_deprecated # noqa
from .opsworks import mock_opsworks, mock_opsworks_deprecated # noqa
from .organizations import mock_organizations # noqa
from .polly import mock_polly # noqa
from .rds import mock_rds, mock_rds_deprecated # noqa
from .rds2 import mock_rds2, mock_rds2_deprecated # noqa
from .redshift import mock_redshift, mock_redshift_deprecated # noqa
from .resourcegroups import mock_resourcegroups # noqa
from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi # noqa
from .route53 import mock_route53, mock_route53_deprecated # noqa
from .s3 import mock_s3, mock_s3_deprecated # noqa
from .ses import mock_ses, mock_ses_deprecated # noqa
from .secretsmanager import mock_secretsmanager # noqa
from .ses import mock_ses, mock_ses_deprecated # noqa
from .sns import mock_sns, mock_sns_deprecated # noqa
from .sqs import mock_sqs, mock_sqs_deprecated # noqa
from .ssm import mock_ssm # noqa
from .stepfunctions import mock_stepfunctions # noqa
from .sts import mock_sts, mock_sts_deprecated # noqa
from .ssm import mock_ssm # noqa
from .route53 import mock_route53, mock_route53_deprecated # noqa
from .swf import mock_swf, mock_swf_deprecated # noqa
from .xray import mock_xray, mock_xray_client, XRaySegment # noqa
from .logs import mock_logs, mock_logs_deprecated # noqa
from .batch import mock_batch # noqa
from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi # noqa
from .iot import mock_iot # noqa
from .iotdata import mock_iotdata # noqa
from .xray import XRaySegment, mock_xray, mock_xray_client # noqa
# import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = "moto"
__version__ = "1.3.14.dev"
try:

moto/backends.py

@@ -5,12 +5,15 @@ from moto.apigateway import apigateway_backends
from moto.athena import athena_backends
from moto.autoscaling import autoscaling_backends
from moto.awslambda import lambda_backends
from moto.batch import batch_backends
from moto.cloudformation import cloudformation_backends
from moto.cloudwatch import cloudwatch_backends
from moto.cognitoidentity import cognitoidentity_backends
from moto.cognitoidp import cognitoidp_backends
from moto.config import config_backends
from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.datasync import datasync_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
from moto.dynamodbstreams import dynamodbstreams_backends
@@ -25,6 +28,8 @@ from moto.glacier import glacier_backends
from moto.glue import glue_backends
from moto.iam import iam_backends
from moto.instance_metadata import instance_metadata_backends
from moto.iot import iot_backends
from moto.iotdata import iotdata_backends
from moto.kinesis import kinesis_backends
from moto.kms import kms_backends
from moto.logs import logs_backends
@@ -34,6 +39,7 @@ from moto.polly import polly_backends
from moto.rds2 import rds2_backends
from moto.redshift import redshift_backends
from moto.resourcegroups import resourcegroups_backends
from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
from moto.route53 import route53_backends
from moto.s3 import s3_backends
from moto.secretsmanager import secretsmanager_backends
@@ -45,11 +51,6 @@ from moto.stepfunctions import stepfunction_backends
from moto.sts import sts_backends
from moto.swf import swf_backends
from moto.xray import xray_backends
from moto.iot import iot_backends
from moto.iotdata import iotdata_backends
from moto.batch import batch_backends
from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
from moto.config import config_backends
BACKENDS = {
"acm": acm_backends,
@@ -63,6 +64,7 @@ BACKENDS = {
"cognito-idp": cognitoidp_backends,
"config": config_backends,
"datapipeline": datapipeline_backends,
"datasync": datasync_backends,
"dynamodb": dynamodb_backends,
"dynamodb2": dynamodb_backends2,
"dynamodbstreams": dynamodbstreams_backends,

moto/datasync/__init__.py Normal file

@@ -0,0 +1,8 @@
from __future__ import unicode_literals
from ..core.models import base_decorator, deprecated_base_decorator
from .models import datasync_backends
datasync_backend = datasync_backends["us-east-1"]
mock_datasync = base_decorator(datasync_backends)
mock_datasync_deprecated = deprecated_base_decorator(datasync_backends)
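
base_decorator also makes the per-region backend importable, a pattern other moto services use to assert on internal state; a hedged sketch of that pattern for DataSync (standard in-process mode, not the standalone server; bucket and role ARNs are placeholders):

import boto3
from moto.datasync import datasync_backend, mock_datasync

@mock_datasync
def test_backend_holds_created_location():
    client = boto3.client("datasync", region_name="us-east-1")
    arn = client.create_location_s3(
        S3BucketArn="arn:aws:s3:::my_bucket",
        S3Config={"BucketAccessRoleArn": "role"},
    )["LocationArn"]
    # datasync_backend is the us-east-1 instance defined above, so the
    # location created through boto3 should be visible in its state.
    assert arn in datasync_backend.locations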

moto/datasync/exceptions.py Normal file

@@ -0,0 +1,15 @@
from __future__ import unicode_literals
from moto.core.exceptions import JsonRESTError
class DataSyncClientError(JsonRESTError):
code = 400
class InvalidRequestException(DataSyncClientError):
def __init__(self, msg=None):
self.code = 400
super(InvalidRequestException, self).__init__(
"InvalidRequestException", msg or "The request is not valid."
)
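
Because InvalidRequestException is a JsonRESTError with code 400, boto3 callers see it as a botocore ClientError; a minimal sketch, mirroring the tests added further down ("abc" is just a nonexistent task ARN):

import boto3
from botocore.exceptions import ClientError
from moto import mock_datasync
from nose.tools import assert_raises

@mock_datasync
def test_unknown_task_raises_client_error():
    client = boto3.client("datasync", region_name="us-east-1")
    # The backend raises InvalidRequestException for the unknown ARN,
    # which botocore surfaces as a 400 ClientError.
    with assert_raises(ClientError):
        client.describe_task(TaskArn="abc")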

moto/datasync/models.py Normal file

@@ -0,0 +1,178 @@
import boto3
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from .exceptions import InvalidRequestException
class Location(BaseModel):
def __init__(
self, location_uri, region_name=None, typ=None, metadata=None, arn_counter=0
):
self.uri = location_uri
self.region_name = region_name
self.metadata = metadata
self.typ = typ
# Generate ARN
self.arn = "arn:aws:datasync:{0}:111222333444:location/loc-{1}".format(
region_name, str(arn_counter).zfill(17)
)
class Task(BaseModel):
def __init__(
self,
source_location_arn,
destination_location_arn,
name,
region_name,
arn_counter=0,
):
self.source_location_arn = source_location_arn
self.destination_location_arn = destination_location_arn
# For simplicity Tasks are either available or running
self.status = "AVAILABLE"
self.name = name
self.current_task_execution_arn = None
# Generate ARN
self.arn = "arn:aws:datasync:{0}:111222333444:task/task-{1}".format(
region_name, str(arn_counter).zfill(17)
)
class TaskExecution(BaseModel):
# For simplicity, task_execution can never fail
# Some documentation refers to this list:
# 'Status': 'QUEUED'|'LAUNCHING'|'PREPARING'|'TRANSFERRING'|'VERIFYING'|'SUCCESS'|'ERROR'
# Others refers to this list:
# INITIALIZING | PREPARING | TRANSFERRING | VERIFYING | SUCCESS/FAILURE
# Checking with AWS Support...
TASK_EXECUTION_INTERMEDIATE_STATES = (
"INITIALIZING",
# 'QUEUED', 'LAUNCHING',
"PREPARING",
"TRANSFERRING",
"VERIFYING",
)
TASK_EXECUTION_FAILURE_STATES = ("ERROR",)
TASK_EXECUTION_SUCCESS_STATES = ("SUCCESS",)
# Also COMPLETED state?
def __init__(self, task_arn, arn_counter=0):
self.task_arn = task_arn
self.arn = "{0}/execution/exec-{1}".format(task_arn, str(arn_counter).zfill(17))
self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[0]
# Simulate a task execution
def iterate_status(self):
if self.status in self.TASK_EXECUTION_FAILURE_STATES:
return
if self.status in self.TASK_EXECUTION_SUCCESS_STATES:
return
if self.status in self.TASK_EXECUTION_INTERMEDIATE_STATES:
for i, status in enumerate(self.TASK_EXECUTION_INTERMEDIATE_STATES):
if status == self.status:
if i < len(self.TASK_EXECUTION_INTERMEDIATE_STATES) - 1:
self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[i + 1]
else:
self.status = self.TASK_EXECUTION_SUCCESS_STATES[0]
return
raise Exception(
"TaskExecution.iterate_status: Unknown status={0}".format(self.status)
)
def cancel(self):
if self.status not in self.TASK_EXECUTION_INTERMEDIATE_STATES:
raise InvalidRequestException(
"Sync task cannot be cancelled in its current status: {0}".format(
self.status
)
)
self.status = "ERROR"
class DataSyncBackend(BaseBackend):
def __init__(self, region_name):
self.region_name = region_name
# Always increase when new things are created
# This ensures uniqueness
self.arn_counter = 0
self.locations = OrderedDict()
self.tasks = OrderedDict()
self.task_executions = OrderedDict()
def reset(self):
region_name = self.region_name
self._reset_model_refs()
self.__dict__ = {}
self.__init__(region_name)
def create_location(self, location_uri, typ=None, metadata=None):
"""
# AWS DataSync allows for duplicate LocationUris
for arn, location in self.locations.items():
if location.uri == location_uri:
raise Exception('Location already exists')
"""
if not typ:
raise Exception("Location type must be specified")
self.arn_counter = self.arn_counter + 1
location = Location(
location_uri,
region_name=self.region_name,
arn_counter=self.arn_counter,
metadata=metadata,
typ=typ,
)
self.locations[location.arn] = location
return location.arn
def create_task(self, source_location_arn, destination_location_arn, name):
if source_location_arn not in self.locations:
raise InvalidRequestException(
"Location {0} not found.".format(source_location_arn)
)
if destination_location_arn not in self.locations:
raise InvalidRequestException(
"Location {0} not found.".format(destination_location_arn)
)
self.arn_counter = self.arn_counter + 1
task = Task(
source_location_arn,
destination_location_arn,
name,
region_name=self.region_name,
arn_counter=self.arn_counter,
)
self.tasks[task.arn] = task
return task.arn
def start_task_execution(self, task_arn):
self.arn_counter = self.arn_counter + 1
if task_arn in self.tasks:
task = self.tasks[task_arn]
if task.status == "AVAILABLE":
task_execution = TaskExecution(task_arn, arn_counter=self.arn_counter)
self.task_executions[task_execution.arn] = task_execution
self.tasks[task_arn].current_task_execution_arn = task_execution.arn
self.tasks[task_arn].status = "RUNNING"
return task_execution.arn
raise InvalidRequestException("Invalid request.")
def cancel_task_execution(self, task_execution_arn):
if task_execution_arn in self.task_executions:
task_execution = self.task_executions[task_execution_arn]
task_execution.cancel()
task_arn = task_execution.task_arn
self.tasks[task_arn].current_task_execution_arn = None
return
raise InvalidRequestException(
"Sync task {0} is not found.".format(task_execution_arn)
)
datasync_backends = {}
for region in boto3.Session().get_available_regions("datasync"):
datasync_backends[region] = DataSyncBackend(region_name=region)
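
The backend can also be driven without boto3, which makes TaskExecution's simulated state machine easy to see; a minimal sketch using the classes above directly (the URIs and task name are arbitrary):

from moto.datasync.models import DataSyncBackend

backend = DataSyncBackend(region_name="us-east-1")
src_arn = backend.create_location("smb://host/share", typ="SMB")
dst_arn = backend.create_location("s3://my_bucket/dir", typ="S3")
task_arn = backend.create_task(src_arn, dst_arn, "demo-task")

exec_arn = backend.start_task_execution(task_arn)  # task flips to RUNNING
execution = backend.task_executions[exec_arn]

# iterate_status() advances one step per call:
# INITIALIZING -> PREPARING -> TRANSFERRING -> VERIFYING -> SUCCESS
while execution.status != "SUCCESS":
    execution.iterate_status()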

moto/datasync/responses.py Normal file

@@ -0,0 +1,155 @@
import json
from moto.core.responses import BaseResponse
from .exceptions import InvalidRequestException
from .models import datasync_backends
class DataSyncResponse(BaseResponse):
@property
def datasync_backend(self):
return datasync_backends[self.region]
def list_locations(self):
locations = list()
for arn, location in self.datasync_backend.locations.items():
locations.append({"LocationArn": location.arn, "LocationUri": location.uri})
return json.dumps({"Locations": locations})
def _get_location(self, location_arn, typ):
location_arn = self._get_param("LocationArn")
if location_arn not in self.datasync_backend.locations:
raise InvalidRequestException(
"Location {0} is not found.".format(location_arn)
)
location = self.datasync_backend.locations[location_arn]
if location.typ != typ:
raise InvalidRequestException(
"Invalid Location type: {0}".format(location.typ)
)
return location
def create_location_s3(self):
# s3://bucket_name/folder/
s3_bucket_arn = self._get_param("S3BucketArn")
subdirectory = self._get_param("Subdirectory")
metadata = {"S3Config": self._get_param("S3Config")}
location_uri_elts = ["s3:/", s3_bucket_arn.split(":")[-1]]
if subdirectory:
location_uri_elts.append(subdirectory)
location_uri = "/".join(location_uri_elts)
arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="S3"
)
return json.dumps({"LocationArn": arn})
def describe_location_s3(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="S3")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"S3Config": location.metadata["S3Config"],
}
)
def create_location_smb(self):
# smb://smb.share.fqdn/AWS_Test/
subdirectory = self._get_param("Subdirectory")
server_hostname = self._get_param("ServerHostname")
metadata = {
"AgentArns": self._get_param("AgentArns"),
"User": self._get_param("User"),
"Domain": self._get_param("Domain"),
"MountOptions": self._get_param("MountOptions"),
}
location_uri = "/".join(["smb:/", server_hostname, subdirectory])
arn = self.datasync_backend.create_location(
location_uri, metadata=metadata, typ="SMB"
)
return json.dumps({"LocationArn": arn})
def describe_location_smb(self):
location_arn = self._get_param("LocationArn")
location = self._get_location(location_arn, typ="SMB")
return json.dumps(
{
"LocationArn": location.arn,
"LocationUri": location.uri,
"AgentArns": location.metadata["AgentArns"],
"User": location.metadata["User"],
"Domain": location.metadata["Domain"],
"MountOptions": location.metadata["MountOptions"],
}
)
def create_task(self):
destination_location_arn = self._get_param("DestinationLocationArn")
source_location_arn = self._get_param("SourceLocationArn")
name = self._get_param("Name")
arn = self.datasync_backend.create_task(
source_location_arn, destination_location_arn, name
)
return json.dumps({"TaskArn": arn})
def list_tasks(self):
tasks = list()
for arn, task in self.datasync_backend.tasks.items():
tasks.append(
{"Name": task.name, "Status": task.status, "TaskArn": task.arn}
)
return json.dumps({"Tasks": tasks})
def describe_task(self):
task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks:
task = self.datasync_backend.tasks[task_arn]
return json.dumps(
{
"TaskArn": task.arn,
"Name": task.name,
"CurrentTaskExecutionArn": task.current_task_execution_arn,
"Status": task.status,
"SourceLocationArn": task.source_location_arn,
"DestinationLocationArn": task.destination_location_arn,
}
)
raise InvalidRequestException
def start_task_execution(self):
task_arn = self._get_param("TaskArn")
if task_arn in self.datasync_backend.tasks:
arn = self.datasync_backend.start_task_execution(task_arn)
if arn:
return json.dumps({"TaskExecutionArn": arn})
raise InvalidRequestException("Invalid request.")
def cancel_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
self.datasync_backend.cancel_task_execution(task_execution_arn)
return json.dumps({})
def describe_task_execution(self):
task_execution_arn = self._get_param("TaskExecutionArn")
if task_execution_arn in self.datasync_backend.task_executions:
task_execution = self.datasync_backend.task_executions[task_execution_arn]
if task_execution:
result = json.dumps(
{
"TaskExecutionArn": task_execution.arn,
"Status": task_execution.status,
}
)
if task_execution.status == "SUCCESS":
self.datasync_backend.tasks[
task_execution.task_arn
].status = "AVAILABLE"
# Simulate task being executed
task_execution.iterate_status()
return result
raise InvalidRequestException
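
Note the side effect above: every DescribeTaskExecution call advances the simulated status one step (and flips the task back to AVAILABLE on SUCCESS), so ordinary client-side polling terminates; a hedged end-to-end sketch, with hostnames, ARNs, and names taken from the tests below as placeholders:

import boto3
from moto import mock_datasync

@mock_datasync
def test_poll_task_execution_until_done():
    client = boto3.client("datasync", region_name="us-east-1")
    smb_arn = client.create_location_smb(
        ServerHostname="host", Subdirectory="somewhere",
        User="", Password="", AgentArns=["stuff"],
    )["LocationArn"]
    s3_arn = client.create_location_s3(
        S3BucketArn="arn:aws:s3:::my_bucket", Subdirectory="dir",
        S3Config={"BucketAccessRoleArn": "role"},
    )["LocationArn"]
    task_arn = client.create_task(
        SourceLocationArn=smb_arn, DestinationLocationArn=s3_arn, Name="demo",
    )["TaskArn"]

    exec_arn = client.start_task_execution(TaskArn=task_arn)["TaskExecutionArn"]
    status = None
    while status not in ("SUCCESS", "ERROR"):
        # Each call moves the mock one state further along.
        status = client.describe_task_execution(
            TaskExecutionArn=exec_arn
        )["Status"]
    assert status == "SUCCESS"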

moto/datasync/urls.py Normal file

@@ -0,0 +1,9 @@
from __future__ import unicode_literals
from .responses import DataSyncResponse
url_bases = ["https?://(.*?)(datasync)(.*?).amazonaws.com"]
url_paths = {
"{0}/$": DataSyncResponse.dispatch,
}
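
These url_bases/url_paths entries are also what moto's standalone server uses for routing; a hedged sketch of pointing boto3 at a locally running moto_server process (port and dummy credentials are arbitrary choices):

# Shell (separate terminal): moto_server datasync -p 5000
import boto3

client = boto3.client(
    "datasync",
    region_name="us-east-1",
    endpoint_url="http://localhost:5000",  # served by the moto_server process
    aws_access_key_id="testing",           # any dummy credentials work
    aws_secret_access_key="testing",
)
print(client.list_locations())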

moto/sqs/responses.py

@@ -1,19 +1,20 @@
from __future__ import unicode_literals
import re
from six.moves.urllib.parse import urlparse
from moto.core.responses import BaseResponse
from moto.core.utils import amz_crc32, amzn_request_id
from .utils import parse_message_attributes
from .models import sqs_backends
from six.moves.urllib.parse import urlparse
from .exceptions import (
EmptyBatchRequest,
InvalidAttributeName,
MessageAttributesInvalid,
MessageNotInflight,
ReceiptHandleIsInvalid,
EmptyBatchRequest,
InvalidAttributeName,
)
from .models import sqs_backends
from .utils import parse_message_attributes
MAXIMUM_VISIBILTY_TIMEOUT = 43200
MAXIMUM_MESSAGE_LENGTH = 262144 # 256 KiB

View File

View File

@@ -0,0 +1,327 @@
import logging
import boto
import boto3
from botocore.exceptions import ClientError
from moto import mock_datasync
from nose.tools import assert_raises
def create_locations(client, create_smb=False, create_s3=False):
"""
Convenience function for creating locations.
Locations must exist before tasks can be created.
"""
smb_arn = None
s3_arn = None
if create_smb:
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User="",
Password="",
AgentArns=["stuff"],
)
smb_arn = response["LocationArn"]
if create_s3:
response = client.create_location_s3(
S3BucketArn="arn:aws:s3:::my_bucket",
Subdirectory="dir",
S3Config={"BucketAccessRoleArn": "role"},
)
s3_arn = response["LocationArn"]
return {"smb_arn": smb_arn, "s3_arn": s3_arn}
@mock_datasync
def test_create_location_smb():
client = boto3.client("datasync", region_name="us-east-1")
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User="",
Password="",
AgentArns=["stuff"],
)
assert "LocationArn" in response
@mock_datasync
def test_describe_location_smb():
client = boto3.client("datasync", region_name="us-east-1")
agent_arns = ["stuff"]
user = "user"
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User=user,
Password="",
AgentArns=agent_arns,
)
response = client.describe_location_smb(LocationArn=response["LocationArn"])
assert "LocationArn" in response
assert "LocationUri" in response
assert response["User"] == user
assert response["AgentArns"] == agent_arns
@mock_datasync
def test_create_location_s3():
client = boto3.client("datasync", region_name="us-east-1")
response = client.create_location_s3(
S3BucketArn="arn:aws:s3:::my_bucket",
Subdirectory="dir",
S3Config={"BucketAccessRoleArn": "role"},
)
assert "LocationArn" in response
@mock_datasync
def test_describe_location_s3():
client = boto3.client("datasync", region_name="us-east-1")
s3_config = {"BucketAccessRoleArn": "role"}
response = client.create_location_s3(
S3BucketArn="arn:aws:s3:::my_bucket", Subdirectory="dir", S3Config=s3_config
)
response = client.describe_location_s3(LocationArn=response["LocationArn"])
assert "LocationArn" in response
assert "LocationUri" in response
assert response["S3Config"] == s3_config
@mock_datasync
def test_describe_location_wrong():
client = boto3.client("datasync", region_name="us-east-1")
agent_arns = ["stuff"]
user = "user"
response = client.create_location_smb(
ServerHostname="host",
Subdirectory="somewhere",
User=user,
Password="",
AgentArns=agent_arns,
)
with assert_raises(ClientError) as e:
response = client.describe_location_s3(LocationArn=response["LocationArn"])
@mock_datasync
def test_list_locations():
client = boto3.client("datasync", region_name="us-east-1")
response = client.list_locations()
assert len(response["Locations"]) == 0
create_locations(client, create_smb=True)
response = client.list_locations()
assert len(response["Locations"]) == 1
assert response["Locations"][0]["LocationUri"] == "smb://host/somewhere"
create_locations(client, create_s3=True)
response = client.list_locations()
assert len(response["Locations"]) == 2
assert response["Locations"][1]["LocationUri"] == "s3://my_bucket/dir"
create_locations(client, create_s3=True)
response = client.list_locations()
assert len(response["Locations"]) == 3
assert response["Locations"][2]["LocationUri"] == "s3://my_bucket/dir"
@mock_datasync
def test_create_task():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_smb=True, create_s3=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
)
assert "TaskArn" in response
@mock_datasync
def test_create_task_fail():
""" Test that Locations must exist before a Task can be created """
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_smb=True, create_s3=True)
with assert_raises(ClientError) as e:
response = client.create_task(
SourceLocationArn="1", DestinationLocationArn=locations["s3_arn"]
)
with assert_raises(ClientError) as e:
response = client.create_task(
SourceLocationArn=locations["smb_arn"], DestinationLocationArn="2"
)
@mock_datasync
def test_list_tasks():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
)
response = client.create_task(
SourceLocationArn=locations["s3_arn"],
DestinationLocationArn=locations["smb_arn"],
Name="task_name",
)
response = client.list_tasks()
tasks = response["Tasks"]
assert len(tasks) == 2
task = tasks[0]
assert task["Status"] == "AVAILABLE"
assert "Name" not in task
task = tasks[1]
assert task["Status"] == "AVAILABLE"
assert task["Name"] == "task_name"
@mock_datasync
def test_describe_task():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.describe_task(TaskArn=task_arn)
assert "TaskArn" in response
assert "Status" in response
assert "SourceLocationArn" in response
assert "DestinationLocationArn" in response
@mock_datasync
def test_describe_task_not_exist():
client = boto3.client("datasync", region_name="us-east-1")
with assert_raises(ClientError) as e:
client.describe_task(TaskArn="abc")
@mock_datasync
def test_start_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.describe_task(TaskArn=task_arn)
assert "CurrentTaskExecutionArn" not in response
response = client.start_task_execution(TaskArn=task_arn)
assert "TaskExecutionArn" in response
task_execution_arn = response["TaskExecutionArn"]
response = client.describe_task(TaskArn=task_arn)
assert response["CurrentTaskExecutionArn"] == task_execution_arn
@mock_datasync
def test_start_task_execution_twice():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
assert "TaskExecutionArn" in response
task_execution_arn = response["TaskExecutionArn"]
with assert_raises(ClientError) as e:
response = client.start_task_execution(TaskArn=task_arn)
@mock_datasync
def test_describe_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
task_execution_arn = response["TaskExecutionArn"]
# Each time task_execution is described the Status will increment
# This is a simple way to simulate a task being executed
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "INITIALIZING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "PREPARING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "TRANSFERRING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "VERIFYING"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["TaskExecutionArn"] == task_execution_arn
assert response["Status"] == "SUCCESS"
@mock_datasync
def test_describe_task_execution_not_exist():
client = boto3.client("datasync", region_name="us-east-1")
with assert_raises(ClientError) as e:
client.describe_task_execution(TaskExecutionArn="abc")
@mock_datasync
def test_cancel_task_execution():
client = boto3.client("datasync", region_name="us-east-1")
locations = create_locations(client, create_s3=True, create_smb=True)
response = client.create_task(
SourceLocationArn=locations["smb_arn"],
DestinationLocationArn=locations["s3_arn"],
Name="task_name",
)
task_arn = response["TaskArn"]
response = client.start_task_execution(TaskArn=task_arn)
task_execution_arn = response["TaskExecutionArn"]
response = client.describe_task(TaskArn=task_arn)
assert response["CurrentTaskExecutionArn"] == task_execution_arn
response = client.cancel_task_execution(TaskExecutionArn=task_execution_arn)
response = client.describe_task(TaskArn=task_arn)
assert "CurrentTaskExecutionArn" not in response
response = client.describe_task_execution(TaskExecutionArn=task_execution_arn)
assert response["Status"] == "ERROR"

tests/test_sqs/test_sqs.py

@@ -1,27 +1,26 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import base64
import json
import os
import time
import uuid
import boto
import boto3
import botocore.exceptions
import six
from botocore.exceptions import ClientError
from boto.exception import SQSError
from boto.sqs.message import RawMessage, Message
from freezegun import freeze_time
import base64
import json
import sure # noqa
import time
import uuid
from moto import settings, mock_sqs, mock_sqs_deprecated
from tests.helpers import requires_boto_gte
import tests.backport_assert_raises # noqa
from nose.tools import assert_raises
from boto.exception import SQSError
from boto.sqs.message import Message, RawMessage
from botocore.exceptions import ClientError
from freezegun import freeze_time
from moto import mock_sqs, mock_sqs_deprecated, settings
from nose import SkipTest
from nose.tools import assert_raises
from tests.helpers import requires_boto_gte
@mock_sqs