Adds some basic endpoints for Amazon Forecast (#3434)

* Adding some basic endpoints for Amazon Forecast, including all dataset group related endpoints

* Adds better testing around exception handling in forecast endpoint, removes some unused code, and cleans up validation code

* Fix unused imports, optimize imports, code style fixes

Co-authored-by: Paul Miller <pwmiller@amazon.com>
This commit is contained in:
pwrmiller 2020-11-06 03:23:47 -05:00 committed by GitHub
parent b7cf2d4478
commit 725ad7571d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 555 additions and 6 deletions

View File

@ -3518,34 +3518,34 @@
## forecast ## forecast
<details> <details>
<summary>0% implemented</summary> <summary>19% implemented</summary>
- [ ] create_dataset - [ ] create_dataset
- [ ] create_dataset_group - [X] create_dataset_group
- [ ] create_dataset_import_job - [ ] create_dataset_import_job
- [ ] create_forecast - [ ] create_forecast
- [ ] create_forecast_export_job - [ ] create_forecast_export_job
- [ ] create_predictor - [ ] create_predictor
- [ ] delete_dataset - [ ] delete_dataset
- [ ] delete_dataset_group - [X] delete_dataset_group
- [ ] delete_dataset_import_job - [ ] delete_dataset_import_job
- [ ] delete_forecast - [ ] delete_forecast
- [ ] delete_forecast_export_job - [ ] delete_forecast_export_job
- [ ] delete_predictor - [ ] delete_predictor
- [ ] describe_dataset - [ ] describe_dataset
- [ ] describe_dataset_group - [X] describe_dataset_group
- [ ] describe_dataset_import_job - [ ] describe_dataset_import_job
- [ ] describe_forecast - [ ] describe_forecast
- [ ] describe_forecast_export_job - [ ] describe_forecast_export_job
- [ ] describe_predictor - [ ] describe_predictor
- [ ] get_accuracy_metrics - [ ] get_accuracy_metrics
- [ ] list_dataset_groups - [X] list_dataset_groups
- [ ] list_dataset_import_jobs - [ ] list_dataset_import_jobs
- [ ] list_datasets - [ ] list_datasets
- [ ] list_forecast_export_jobs - [ ] list_forecast_export_jobs
- [ ] list_forecasts - [ ] list_forecasts
- [ ] list_predictors - [ ] list_predictors
- [ ] update_dataset_group - [X] update_dataset_group
</details> </details>
## forecastquery ## forecastquery

View File

@ -102,6 +102,7 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
| ELB | @mock_elb | core endpoints done | | | ELB | @mock_elb | core endpoints done | |
| ELBv2 | @mock_elbv2 | all endpoints done | | | ELBv2 | @mock_elbv2 | all endpoints done | |
| EMR | @mock_emr | core endpoints done | | | EMR | @mock_emr | core endpoints done | |
| Forecast | @mock_forecast | some core endpoints done | |
| Glacier | @mock_glacier | core endpoints done | | | Glacier | @mock_glacier | core endpoints done | |
| IAM | @mock_iam | core endpoints done | | | IAM | @mock_iam | core endpoints done | |
| IoT | @mock_iot | core endpoints done | | | IoT | @mock_iot | core endpoints done | |

View File

@ -60,6 +60,8 @@ Currently implemented Services:
+---------------------------+-----------------------+------------------------------------+ +---------------------------+-----------------------+------------------------------------+
| EMR | @mock_emr | core endpoints done | | EMR | @mock_emr | core endpoints done |
+---------------------------+-----------------------+------------------------------------+ +---------------------------+-----------------------+------------------------------------+
| Forecast | @mock_forecast | basic endpoints done |
+---------------------------+-----------------------+------------------------------------+
| Glacier | @mock_glacier | core endpoints done | | Glacier | @mock_glacier | core endpoints done |
+---------------------------+-----------------------+------------------------------------+ +---------------------------+-----------------------+------------------------------------+
| IAM | @mock_iam | core endpoints done | | IAM | @mock_iam | core endpoints done |

View File

@ -63,6 +63,7 @@ mock_elbv2 = lazy_load(".elbv2", "mock_elbv2")
mock_emr = lazy_load(".emr", "mock_emr") mock_emr = lazy_load(".emr", "mock_emr")
mock_emr_deprecated = lazy_load(".emr", "mock_emr_deprecated") mock_emr_deprecated = lazy_load(".emr", "mock_emr_deprecated")
mock_events = lazy_load(".events", "mock_events") mock_events = lazy_load(".events", "mock_events")
mock_forecast = lazy_load(".forecast", "mock_forecast")
mock_glacier = lazy_load(".glacier", "mock_glacier") mock_glacier = lazy_load(".glacier", "mock_glacier")
mock_glacier_deprecated = lazy_load(".glacier", "mock_glacier_deprecated") mock_glacier_deprecated = lazy_load(".glacier", "mock_glacier_deprecated")
mock_glue = lazy_load(".glue", "mock_glue") mock_glue = lazy_load(".glue", "mock_glue")

View File

@ -75,6 +75,7 @@ BACKENDS = {
"kinesisvideoarchivedmedia", "kinesisvideoarchivedmedia",
"kinesisvideoarchivedmedia_backends", "kinesisvideoarchivedmedia_backends",
), ),
"forecast": ("forecast", "forecast_backends"),
} }

View File

@ -0,0 +1,7 @@
from __future__ import unicode_literals
from .models import forecast_backends
from ..core.models import base_decorator
# The backend instance for the default region, exposed for direct access.
forecast_backend = forecast_backends["us-east-1"]
# Decorator that activates the mocked Forecast backends for a test.
mock_forecast = base_decorator(forecast_backends)

View File

@ -0,0 +1,43 @@
from __future__ import unicode_literals
import json
class AWSError(Exception):
    """Base class for Forecast service errors.

    Carries an AWS error ``__type`` string and an HTTP status code, and
    knows how to render itself as a (json_body, headers) response pair.
    """

    TYPE = None  # default value for the "__type" field; subclasses override
    STATUS = 400  # default HTTP status code

    def __init__(self, message, type=None, status=None):
        self.message = message
        # Fall back to the class-level defaults when not given explicitly.
        self.type = self.TYPE if type is None else type
        self.status = self.STATUS if status is None else status

    def response(self):
        """Return the (body, headers) pair moto responses expect."""
        body = json.dumps({"__type": self.type, "message": self.message})
        return body, {"status": self.status}
class InvalidInputException(AWSError):
    """Raised when a request references resources that do not exist (e.g. unknown dataset ARNs)."""

    TYPE = "InvalidInputException"
class ResourceAlreadyExistsException(AWSError):
    """Raised when creating a resource whose ARN is already registered."""

    TYPE = "ResourceAlreadyExistsException"
class ResourceNotFoundException(AWSError):
    """Raised when a describe/delete/update targets an ARN that is not registered."""

    TYPE = "ResourceNotFoundException"
class ResourceInUseException(AWSError):
    # NOTE(review): declared for API parity; not raised anywhere in this module yet.
    TYPE = "ResourceInUseException"
class LimitExceededException(AWSError):
    # NOTE(review): declared for API parity; not raised anywhere in this module yet.
    TYPE = "LimitExceededException"
class ValidationException(AWSError):
    """Raised when request parameters fail the model-level validation checks."""

    TYPE = "ValidationException"

173
moto/forecast/models.py Normal file
View File

@ -0,0 +1,173 @@
import re
from datetime import datetime
from boto3 import Session
from future.utils import iteritems
from moto.core import ACCOUNT_ID, BaseBackend
from moto.core.utils import iso_8601_datetime_without_milliseconds
from .exceptions import (
InvalidInputException,
ResourceAlreadyExistsException,
ResourceNotFoundException,
ValidationException,
)
class DatasetGroup:
    """In-memory model of an Amazon Forecast dataset group.

    Validates the group name and domain on construction and raises
    ValidationException (aggregating all failures) when invalid.
    """

    # AWS documents the name constraint as ^[a-zA-Z][a-zA-Z0-9_]*.
    # Bug fix: the previous pattern, [a-z-A-Z0-9_], contained a literal '-'
    # between the 'a-z' and 'A-Z' ranges and therefore wrongly accepted
    # dashes in dataset group names.
    accepted_dataset_group_name_format = re.compile(r"^[a-zA-Z][a-zA-Z0-9_]*")
    # NOTE(review): currently unused by the validation below.
    accepted_dataset_group_arn_format = re.compile(r"^[a-zA-Z0-9\-\_\.\/\:]+$")
    # Valid values for the Domain request parameter.
    accepted_dataset_types = [
        "INVENTORY_PLANNING",
        "METRICS",
        "RETAIL",
        "EC2_CAPACITY",
        "CUSTOM",
        "WEB_TRAFFIC",
        "WORK_FORCE",
    ]

    def __init__(
        self, region_name, dataset_arns, dataset_group_name, domain, tags=None
    ):
        self.creation_date = iso_8601_datetime_without_milliseconds(datetime.now())
        self.modified_date = self.creation_date
        self.arn = (
            "arn:aws:forecast:"
            + region_name
            + ":"
            + str(ACCOUNT_ID)
            + ":dataset-group/"
            + dataset_group_name
        )
        self.dataset_arns = dataset_arns if dataset_arns else []
        self.dataset_group_name = dataset_group_name
        self.domain = domain
        self.tags = tags
        self._validate()

    def update(self, dataset_arns):
        """Replace the group's dataset ARNs and refresh the modification time.

        Bug fix: this previously assigned ``last_modified_date``, an attribute
        nothing else reads; describe_dataset_group reports ``modified_date``,
        so updates never changed the reported timestamp.
        """
        self.dataset_arns = dataset_arns
        self.modified_date = iso_8601_datetime_without_milliseconds(datetime.now())

    def _validate(self):
        """Run all field validators; raise ValidationException listing every failure."""
        errors = []

        errors.extend(self._validate_dataset_group_name())
        errors.extend(self._validate_dataset_group_name_len())
        errors.extend(self._validate_dataset_group_domain())

        if errors:
            err_count = len(errors)
            # Mirror AWS's "<n> validation error(s) detected: ..." wording.
            message = str(err_count) + " validation error"
            message += "s" if err_count > 1 else ""
            message += " detected: "
            message += "; ".join(errors)
            raise ValidationException(message)

    def _validate_dataset_group_name(self):
        """Check the name against the documented character pattern."""
        errors = []
        if not re.match(
            self.accepted_dataset_group_name_format, self.dataset_group_name
        ):
            errors.append(
                "Value '"
                + self.dataset_group_name
                + "' at 'datasetGroupName' failed to satisfy constraint: Member must satisfy regular expression pattern "
                + self.accepted_dataset_group_name_format.pattern
            )
        return errors

    def _validate_dataset_group_name_len(self):
        """Check the name does not exceed 63 characters."""
        errors = []
        if len(self.dataset_group_name) >= 64:
            errors.append(
                "Value '"
                + self.dataset_group_name
                + "' at 'datasetGroupName' failed to satisfy constraint: Member must have length less than or equal to 63"
            )
        return errors

    def _validate_dataset_group_domain(self):
        """Check the domain is one of the accepted enum values."""
        errors = []
        if self.domain not in self.accepted_dataset_types:
            errors.append(
                "Value '"
                + self.domain
                + "' at 'domain' failed to satisfy constraint: Member must satisfy enum value set "
                + str(self.accepted_dataset_types)
            )
        return errors
class ForecastBackend(BaseBackend):
    """Per-region backend holding mocked Forecast state.

    Tracks dataset groups (and, eventually, datasets) keyed by ARN.
    """

    def __init__(self, region_name):
        super(ForecastBackend, self).__init__()
        self.dataset_groups = {}  # arn -> DatasetGroup
        self.datasets = {}  # arn -> dataset (no create_dataset endpoint yet)
        self.region_name = region_name

    def create_dataset_group(self, dataset_group_name, domain, dataset_arns, tags):
        """Create and register a DatasetGroup; return it.

        Raises InvalidInputException for unknown dataset ARNs,
        ResourceAlreadyExistsException for a duplicate group ARN, and
        ValidationException (from DatasetGroup) for invalid fields.
        """
        dataset_group = DatasetGroup(
            region_name=self.region_name,
            dataset_group_name=dataset_group_name,
            domain=domain,
            dataset_arns=dataset_arns,
            tags=tags,
        )

        if dataset_arns:
            for dataset_arn in dataset_arns:
                if dataset_arn not in self.datasets:
                    raise InvalidInputException(
                        "Dataset arns: [" + dataset_arn + "] are not found"
                    )

        if self.dataset_groups.get(dataset_group.arn):
            raise ResourceAlreadyExistsException(
                "A dataset group already exists with the arn: " + dataset_group.arn
            )
        self.dataset_groups[dataset_group.arn] = dataset_group
        return dataset_group

    def describe_dataset_group(self, dataset_group_arn):
        """Return the DatasetGroup for the ARN or raise ResourceNotFoundException."""
        try:
            dataset_group = self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)
        return dataset_group

    def delete_dataset_group(self, dataset_group_arn):
        """Remove the DatasetGroup for the ARN or raise ResourceNotFoundException."""
        try:
            del self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)

    def update_dataset_group(self, dataset_group_arn, dataset_arns):
        """Replace a group's dataset ARNs, validating each against known datasets."""
        try:
            dsg = self.dataset_groups[dataset_group_arn]
        except KeyError:
            raise ResourceNotFoundException("No resource found " + dataset_group_arn)

        for dataset_arn in dataset_arns:
            # Bug fix: validate against the backend's known datasets, as
            # create_dataset_group does. The previous check against the
            # group's own dataset_arns meant a dataset could never be newly
            # attached to a group.
            if dataset_arn not in self.datasets:
                raise InvalidInputException(
                    "Dataset arns: [" + dataset_arn + "] are not found"
                )

        dsg.update(dataset_arns)

    def list_dataset_groups(self):
        """Return all dataset groups (unordered)."""
        # dict.values() works on both Python 2 and 3; future.utils.iteritems
        # is unnecessary here.
        return list(self.dataset_groups.values())

    def reset(self):
        """Clear all state but keep the region, re-running __init__."""
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)
# One ForecastBackend per region in which the service is available.
forecast_backends = {
    region: ForecastBackend(region)
    for region in Session().get_available_regions("forecast")
}

View File

@ -0,0 +1,92 @@
from __future__ import unicode_literals
import json
from moto.core.responses import BaseResponse
from moto.core.utils import amzn_request_id
from .exceptions import AWSError
from .models import forecast_backends
class ForecastResponse(BaseResponse):
    """HTTP-level handlers mapping Forecast API actions onto ForecastBackend."""

    @property
    def forecast_backend(self):
        """The ForecastBackend for the region of the current request."""
        return forecast_backends[self.region]

    @amzn_request_id
    def create_dataset_group(self):
        """Handle CreateDatasetGroup; respond with the new group's ARN."""
        dataset_group_name = self._get_param("DatasetGroupName")
        domain = self._get_param("Domain")
        dataset_arns = self._get_param("DatasetArns")
        tags = self._get_param("Tags")

        try:
            dataset_group = self.forecast_backend.create_dataset_group(
                dataset_group_name=dataset_group_name,
                domain=domain,
                dataset_arns=dataset_arns,
                tags=tags,
            )
            response = {"DatasetGroupArn": dataset_group.arn}
            return 200, {}, json.dumps(response)
        except AWSError as err:
            return err.response()

    @amzn_request_id
    def describe_dataset_group(self):
        """Handle DescribeDatasetGroup; respond with the group's attributes."""
        dataset_group_arn = self._get_param("DatasetGroupArn")

        try:
            dataset_group = self.forecast_backend.describe_dataset_group(
                dataset_group_arn=dataset_group_arn
            )
            response = {
                "CreationTime": dataset_group.creation_date,
                "DatasetArns": dataset_group.dataset_arns,
                "DatasetGroupArn": dataset_group.arn,
                "DatasetGroupName": dataset_group.dataset_group_name,
                "Domain": dataset_group.domain,
                "LastModificationTime": dataset_group.modified_date,
                "Status": "ACTIVE",
            }
            return 200, {}, json.dumps(response)
        except AWSError as err:
            return err.response()

    @amzn_request_id
    def delete_dataset_group(self):
        """Handle DeleteDatasetGroup; empty body on success."""
        dataset_group_arn = self._get_param("DatasetGroupArn")
        try:
            self.forecast_backend.delete_dataset_group(dataset_group_arn)
            return 200, {}, None
        except AWSError as err:
            return err.response()

    @amzn_request_id
    def update_dataset_group(self):
        """Handle UpdateDatasetGroup; empty body on success."""
        dataset_group_arn = self._get_param("DatasetGroupArn")
        dataset_arns = self._get_param("DatasetArns")
        try:
            self.forecast_backend.update_dataset_group(dataset_group_arn, dataset_arns)
            return 200, {}, None
        except AWSError as err:
            return err.response()

    @amzn_request_id
    def list_dataset_groups(self):
        """Handle ListDatasetGroups; most recently modified groups first."""
        list_all = self.forecast_backend.list_dataset_groups()
        list_all = sorted(
            [
                {
                    "DatasetGroupArn": dsg.arn,
                    "DatasetGroupName": dsg.dataset_group_name,
                    "CreationTime": dsg.creation_date,
                    # Bug fix: this previously reported creation_date even for
                    # updated groups; describe_dataset_group reports
                    # modified_date, so stay consistent with it.
                    "LastModificationTime": dsg.modified_date,
                }
                for dsg in list_all
            ],
            key=lambda x: x["LastModificationTime"],
            reverse=True,
        )
        response = {"DatasetGroups": list_all}
        return 200, {}, json.dumps(response)

7
moto/forecast/urls.py Normal file
View File

@ -0,0 +1,7 @@
from __future__ import unicode_literals
from .responses import ForecastResponse

# Hostname pattern for the Forecast service endpoint.
# NOTE(review): the dots are unescaped, so '.' matches any character here;
# presumably literal dots were intended — confirm against moto's URL
# conventions for other services.
url_bases = ["https?://forecast.(.+).amazonaws.com"]

# All Forecast actions are POSTed to the service root; ForecastResponse.dispatch
# routes them to the matching handler method.
url_paths = {"{0}/$": ForecastResponse.dispatch}

View File

View File

@ -0,0 +1,222 @@
from __future__ import unicode_literals
import boto3
import sure # noqa
from botocore.exceptions import ClientError
from nose.tools import assert_raises
from parameterized import parameterized
from moto import mock_forecast
from moto.core import ACCOUNT_ID
region = "us-east-1"  # region used by every test in this module
account_id = None  # NOTE(review): appears unused in this module — confirm before removing

# Every Domain value the mocked backend accepts for dataset groups.
valid_domains = [
    "RETAIL",
    "CUSTOM",
    "INVENTORY_PLANNING",
    "EC2_CAPACITY",
    "WORK_FORCE",
    "WEB_TRAFFIC",
    "METRICS",
]
@parameterized(valid_domains)
@mock_forecast
def test_forecast_dataset_group_create(domain):
    """Creating a dataset group succeeds for every valid domain."""
    group_name = "example_dataset_group"
    client = boto3.client("forecast", region_name=region)
    response = client.create_dataset_group(DatasetGroupName=group_name, Domain=domain)
    expected_arn = "arn:aws:forecast:{}:{}:dataset-group/{}".format(
        region, ACCOUNT_ID, group_name
    )
    response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    response["DatasetGroupArn"].should.equal(expected_arn)
@mock_forecast
def test_forecast_dataset_group_create_invalid_domain():
    """An unknown Domain value is rejected with a ValidationException."""
    client = boto3.client("forecast", region_name=region)
    bad_domain = "INVALID"

    with assert_raises(ClientError) as exc:
        client.create_dataset_group(
            DatasetGroupName="example_dataset_group", Domain=bad_domain
        )

    error = exc.exception.response["Error"]
    error["Code"].should.equal("ValidationException")
    error["Message"].should.equal(
        "1 validation error detected: Value '{}' at 'domain' failed to satisfy "
        "constraint: Member must satisfy enum value set ['INVENTORY_PLANNING', "
        "'METRICS', 'RETAIL', 'EC2_CAPACITY', 'CUSTOM', 'WEB_TRAFFIC', "
        "'WORK_FORCE']".format(bad_domain)
    )
@parameterized([" ", "a" * 64])
@mock_forecast
def test_forecast_dataset_group_create_invalid_name(name):
    """Names violating the AWS constraints raise a ValidationException."""
    client = boto3.client("forecast", region_name=region)

    with assert_raises(ClientError) as exc:
        client.create_dataset_group(DatasetGroupName=name, Domain="CUSTOM")

    error = exc.exception.response["Error"]
    error["Code"].should.equal("ValidationException")
    error["Message"].should.contain(
        "1 validation error detected: Value '{}' at 'datasetGroupName' failed "
        "to satisfy constraint: Member must".format(name)
    )
@mock_forecast
def test_forecast_dataset_group_create_duplicate_fails():
    """Creating a second dataset group with the same name raises an error."""
    client = boto3.client("forecast", region_name=region)
    client.create_dataset_group(DatasetGroupName="name", Domain="RETAIL")

    with assert_raises(ClientError) as exc:
        client.create_dataset_group(DatasetGroupName="name", Domain="RETAIL")

    error_code = exc.exception.response["Error"]["Code"]
    error_code.should.equal("ResourceAlreadyExistsException")
@mock_forecast
def test_forecast_dataset_group_list_default_empty():
    """With no groups created, list_dataset_groups returns an empty list."""
    client = boto3.client("forecast", region_name=region)
    # Renamed from ``list`` — the original shadowed the builtin.
    result = client.list_dataset_groups()
    result["DatasetGroups"].should.be.empty
@mock_forecast
def test_forecast_dataset_group_list_some():
    """list_dataset_groups returns the single created group with its ARN."""
    client = boto3.client("forecast", region_name=region)
    client.create_dataset_group(DatasetGroupName="hello", Domain="CUSTOM")

    result = client.list_dataset_groups()

    expected_arn = "arn:aws:forecast:{}:{}:dataset-group/hello".format(
        region, ACCOUNT_ID
    )
    assert len(result["DatasetGroups"]) == 1
    result["DatasetGroups"][0]["DatasetGroupArn"].should.equal(expected_arn)
@mock_forecast
def test_forecast_delete_dataset_group():
    """Deleting an existing dataset group by ARN succeeds without error."""
    group_name = "name"
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/{}".format(
        region, ACCOUNT_ID, group_name
    )
    client = boto3.client("forecast", region_name=region)
    client.create_dataset_group(DatasetGroupName=group_name, Domain="CUSTOM")
    client.delete_dataset_group(DatasetGroupArn=group_arn)
@mock_forecast
def test_forecast_delete_dataset_group_missing():
    """Deleting a nonexistent group raises ResourceNotFoundException."""
    client = boto3.client("forecast", region_name=region)
    missing_arn = "arn:aws:forecast:{}:{}:dataset-group/missing".format(
        region, ACCOUNT_ID
    )

    with assert_raises(ClientError) as exc:
        client.delete_dataset_group(DatasetGroupArn=missing_arn)

    error = exc.exception.response["Error"]
    error["Code"].should.equal("ResourceNotFoundException")
    error["Message"].should.equal("No resource found " + missing_arn)
@mock_forecast
def test_forecast_update_dataset_arns_empty():
    """Updating a dataset group with an empty DatasetArns list succeeds."""
    group_name = "name"
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/{}".format(
        region, ACCOUNT_ID, group_name
    )
    client = boto3.client("forecast", region_name=region)
    client.create_dataset_group(DatasetGroupName=group_name, Domain="CUSTOM")
    client.update_dataset_group(DatasetGroupArn=group_arn, DatasetArns=[])
@mock_forecast
def test_forecast_update_dataset_group_not_found():
    """Updating a nonexistent group raises ResourceNotFoundException."""
    client = boto3.client("forecast", region_name=region)
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/test".format(region, ACCOUNT_ID)

    with assert_raises(ClientError) as exc:
        client.update_dataset_group(DatasetGroupArn=group_arn, DatasetArns=[])

    error = exc.exception.response["Error"]
    error["Code"].should.equal("ResourceNotFoundException")
    error["Message"].should.equal("No resource found " + group_arn)
@mock_forecast
def test_describe_dataset_group():
    """describe_dataset_group echoes the ARN, domain and (empty) dataset list."""
    group_name = "test"
    client = boto3.client("forecast", region_name=region)
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/{}".format(
        region, ACCOUNT_ID, group_name
    )
    client.create_dataset_group(DatasetGroupName=group_name, Domain="CUSTOM")

    result = client.describe_dataset_group(DatasetGroupArn=group_arn)

    assert result.get("DatasetGroupArn") == group_arn
    assert result.get("Domain") == "CUSTOM"
    assert result.get("DatasetArns") == []
@mock_forecast
def test_describe_dataset_group_missing():
    """Describing a nonexistent group raises ResourceNotFoundException."""
    client = boto3.client("forecast", region_name=region)
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/name".format(region, ACCOUNT_ID)

    with assert_raises(ClientError) as exc:
        client.describe_dataset_group(DatasetGroupArn=group_arn)

    error = exc.exception.response["Error"]
    error["Code"].should.equal("ResourceNotFoundException")
    error["Message"].should.equal("No resource found " + group_arn)
@mock_forecast
def test_create_dataset_group_missing_datasets():
    """Referencing unknown dataset ARNs at creation raises InvalidInputException."""
    client = boto3.client("forecast", region_name=region)
    dataset_arn = "arn:aws:forecast:{}:{}:dataset/name".format(region, ACCOUNT_ID)

    with assert_raises(ClientError) as exc:
        client.create_dataset_group(
            DatasetGroupName="name", Domain="CUSTOM", DatasetArns=[dataset_arn]
        )

    error = exc.exception.response["Error"]
    error["Code"].should.equal("InvalidInputException")
    error["Message"].should.equal("Dataset arns: [" + dataset_arn + "] are not found")
@mock_forecast
def test_update_dataset_group_missing_datasets():
    """Referencing unknown dataset ARNs in an update raises InvalidInputException."""
    group_name = "test"
    client = boto3.client("forecast", region_name=region)
    group_arn = "arn:aws:forecast:{}:{}:dataset-group/{}".format(
        region, ACCOUNT_ID, group_name
    )
    client.create_dataset_group(DatasetGroupName=group_name, Domain="CUSTOM")
    dataset_arn = "arn:aws:forecast:{}:{}:dataset/name".format(region, ACCOUNT_ID)

    with assert_raises(ClientError) as exc:
        client.update_dataset_group(
            DatasetGroupArn=group_arn, DatasetArns=[dataset_arn]
        )

    error = exc.exception.response["Error"]
    error["Code"].should.equal("InvalidInputException")
    error["Message"].should.equal("Dataset arns: [" + dataset_arn + "] are not found")