2021-08-04 16:24:26 +00:00
|
|
|
from collections import OrderedDict
|
2022-12-11 13:47:26 +00:00
|
|
|
from typing import Any, Dict, List, Optional
|
2022-11-10 09:43:20 +00:00
|
|
|
from moto.core import BaseBackend, BackendDict, BaseModel
|
2019-11-01 17:24:21 +00:00
|
|
|
|
2019-11-02 19:34:35 +00:00
|
|
|
from .exceptions import InvalidRequestException
|
2019-11-01 17:24:21 +00:00
|
|
|
|
|
|
|
|
2019-11-02 19:34:35 +00:00
|
|
|
class Location(BaseModel):
    """In-memory representation of a single AWS DataSync location."""

    def __init__(
        self,
        location_uri: str,
        region_name: str,
        typ: str,
        metadata: Dict[str, Any],
        arn_counter: int = 0,
    ):
        self.uri = location_uri
        self.region_name = region_name
        self.metadata = metadata
        self.typ = typ
        # Build an AWS-shaped ARN; the zero-padded counter keeps it unique.
        suffix = str(arn_counter).zfill(17)
        self.arn = f"arn:aws:datasync:{region_name}:111222333444:location/loc-{suffix}"
|
2019-11-01 17:24:21 +00:00
|
|
|
|
|
|
|
|
2019-11-01 19:16:59 +00:00
|
|
|
class Task(BaseModel):
    """In-memory representation of a single AWS DataSync task."""

    def __init__(
        self,
        source_location_arn: str,
        destination_location_arn: str,
        name: str,
        region_name: str,
        metadata: Dict[str, Any],
        arn_counter: int = 0,
    ):
        self.source_location_arn = source_location_arn
        self.destination_location_arn = destination_location_arn
        self.name = name
        self.metadata = metadata
        # For simplicity Tasks are either available or running
        self.status = "AVAILABLE"
        self.current_task_execution_arn: Optional[str] = None
        # Build an AWS-shaped ARN; the zero-padded counter keeps it unique.
        suffix = str(arn_counter).zfill(17)
        self.arn = f"arn:aws:datasync:{region_name}:111222333444:task/task-{suffix}"
|
2019-11-02 19:34:35 +00:00
|
|
|
|
2019-11-01 17:24:21 +00:00
|
|
|
|
2019-11-01 19:16:59 +00:00
|
|
|
class TaskExecution(BaseModel):
    """A single simulated run of a Task.

    For simplicity a task execution can never fail on its own.
    AWS documentation is inconsistent about the status list:
    some pages use 'QUEUED'|'LAUNCHING'|'PREPARING'|'TRANSFERRING'|'VERIFYING'|'SUCCESS'|'ERROR',
    others use INITIALIZING | PREPARING | TRANSFERRING | VERIFYING | SUCCESS/FAILURE.
    Checking with AWS Support...
    """

    TASK_EXECUTION_INTERMEDIATE_STATES = (
        "INITIALIZING",
        # 'QUEUED', 'LAUNCHING',
        "PREPARING",
        "TRANSFERRING",
        "VERIFYING",
    )

    TASK_EXECUTION_FAILURE_STATES = ("ERROR",)
    TASK_EXECUTION_SUCCESS_STATES = ("SUCCESS",)
    # Also COMPLETED state?

    def __init__(self, task_arn: str, arn_counter: int = 0):
        self.task_arn = task_arn
        # Execution ARN nests under the owning task's ARN.
        self.arn = f"{task_arn}/execution/exec-{str(arn_counter).zfill(17)}"
        self.status = self.TASK_EXECUTION_INTERMEDIATE_STATES[0]

    def iterate_status(self) -> None:
        """Advance the simulated execution one state toward SUCCESS.

        Terminal states (SUCCESS / ERROR) are left untouched; an
        unrecognized status is a programming error and raises.
        """
        if self.status in self.TASK_EXECUTION_FAILURE_STATES:
            return
        if self.status in self.TASK_EXECUTION_SUCCESS_STATES:
            return
        intermediates = self.TASK_EXECUTION_INTERMEDIATE_STATES
        if self.status in intermediates:
            position = intermediates.index(self.status)
            if position + 1 < len(intermediates):
                # Step to the next intermediate state.
                self.status = intermediates[position + 1]
            else:
                # Last intermediate state completed -> report success.
                self.status = self.TASK_EXECUTION_SUCCESS_STATES[0]
            return
        raise Exception(f"TaskExecution.iterate_status: Unknown status={self.status}")

    def cancel(self) -> None:
        """Abort a running execution; only intermediate states may be cancelled."""
        if self.status not in self.TASK_EXECUTION_INTERMEDIATE_STATES:
            raise InvalidRequestException(
                f"Sync task cannot be cancelled in its current status: {self.status}"
            )
        self.status = "ERROR"
|
|
|
|
|
2019-11-01 17:24:21 +00:00
|
|
|
|
|
|
|
class DataSyncBackend(BaseBackend):
    """Backend holding all DataSync state for one account/region pair."""

    def __init__(self, region_name: str, account_id: str):
        super().__init__(region_name, account_id)
        # Always increase when new things are created; this ensures uniqueness.
        self.arn_counter = 0
        self.locations: Dict[str, Location] = OrderedDict()
        self.tasks: Dict[str, Task] = OrderedDict()
        self.task_executions: Dict[str, TaskExecution] = OrderedDict()

    @staticmethod
    def default_vpc_endpoint_service(service_region: str, zones: List[str]) -> List[Dict[str, Any]]:  # type: ignore[misc]
        """Default VPC endpoint service."""
        return BaseBackend.default_vpc_endpoint_service_factory(
            service_region, zones, "datasync"
        )

    def create_location(
        self, location_uri: str, typ: str, metadata: Dict[str, Any]
    ) -> str:
        """
        # AWS DataSync allows for duplicate LocationUris
        for arn, location in self.locations.items():
            if location.uri == location_uri:
                raise Exception('Location already exists')
        """
        if not typ:
            raise Exception("Location type must be specified")
        self.arn_counter += 1
        new_location = Location(
            location_uri,
            region_name=self.region_name,
            arn_counter=self.arn_counter,
            metadata=metadata,
            typ=typ,
        )
        self.locations[new_location.arn] = new_location
        return new_location.arn

    def _get_location(self, location_arn: str, typ: str) -> Location:
        """Return the location for ``location_arn``; it must exist and match ``typ``."""
        if location_arn not in self.locations:
            raise InvalidRequestException(f"Location {location_arn} is not found.")
        found = self.locations[location_arn]
        if found.typ != typ:
            raise InvalidRequestException(f"Invalid Location type: {found.typ}")
        return found

    def delete_location(self, location_arn: str) -> None:
        """Delete a location; unknown ARNs raise InvalidRequestException."""
        if location_arn not in self.locations:
            raise InvalidRequestException
        del self.locations[location_arn]

    def create_task(
        self,
        source_location_arn: str,
        destination_location_arn: str,
        name: str,
        metadata: Dict[str, Any],
    ) -> str:
        """Create a task between two existing locations and return its ARN."""
        # Both endpoints must already exist; the error message format is
        # identical for source and destination.
        for endpoint_arn in (source_location_arn, destination_location_arn):
            if endpoint_arn not in self.locations:
                raise InvalidRequestException(f"Location {endpoint_arn} not found.")
        self.arn_counter += 1
        new_task = Task(
            source_location_arn,
            destination_location_arn,
            name,
            region_name=self.region_name,
            arn_counter=self.arn_counter,
            metadata=metadata,
        )
        self.tasks[new_task.arn] = new_task
        return new_task.arn

    def _get_task(self, task_arn: str) -> Task:
        """Look up a task; unknown ARNs raise InvalidRequestException."""
        if task_arn not in self.tasks:
            raise InvalidRequestException
        return self.tasks[task_arn]

    def update_task(self, task_arn: str, name: str, metadata: Dict[str, Any]) -> None:
        """Overwrite the name and metadata of an existing task."""
        if task_arn not in self.tasks:
            raise InvalidRequestException(f"Sync task {task_arn} is not found.")
        existing = self.tasks[task_arn]
        existing.name = name
        existing.metadata = metadata

    def delete_task(self, task_arn: str) -> None:
        """Delete a task; unknown ARNs raise InvalidRequestException."""
        if task_arn not in self.tasks:
            raise InvalidRequestException
        del self.tasks[task_arn]

    def start_task_execution(self, task_arn: str) -> str:
        """Start an execution for an AVAILABLE task and return the execution ARN.

        Note: the ARN counter advances on every start attempt, including
        ones rejected below.
        """
        self.arn_counter += 1
        task = self.tasks.get(task_arn)
        if task is not None and task.status == "AVAILABLE":
            execution = TaskExecution(task_arn, arn_counter=self.arn_counter)
            self.task_executions[execution.arn] = execution
            task.current_task_execution_arn = execution.arn
            task.status = "RUNNING"
            return execution.arn
        raise InvalidRequestException("Invalid request.")

    def _get_task_execution(self, task_execution_arn: str) -> TaskExecution:
        """Look up a task execution; unknown ARNs raise InvalidRequestException."""
        if task_execution_arn not in self.task_executions:
            raise InvalidRequestException
        return self.task_executions[task_execution_arn]

    def cancel_task_execution(self, task_execution_arn: str) -> None:
        """Cancel a running execution and return its task to AVAILABLE."""
        execution = self.task_executions.get(task_execution_arn)
        if execution is None:
            raise InvalidRequestException(
                f"Sync task {task_execution_arn} is not found."
            )
        execution.cancel()
        owner = self.tasks[execution.task_arn]
        owner.current_task_execution_arn = None
        owner.status = "AVAILABLE"
|
2019-11-02 19:34:35 +00:00
|
|
|
|
2019-11-01 17:24:21 +00:00
|
|
|
|
2021-12-24 21:02:45 +00:00
|
|
|
# NOTE(review): presumably BackendDict provides the per-account/per-region
# DataSyncBackend instances used by moto's dispatch — confirm against moto core.
datasync_backends = BackendDict(DataSyncBackend, "datasync")
|