Cloudwatch - Return built-in S3 metrics - take 2 (#3839)

Bert Blommers 2021-11-15 19:40:11 -01:00 committed by GitHub
parent 7664cab828
commit 52aeac1cee
7 changed files with 165 additions and 60 deletions

View File

@@ -2,11 +2,15 @@ import json
 from boto3 import Session
+from moto.core import (
+    BaseBackend,
+    BaseModel,
+    CloudWatchMetricProvider,
+)
 from moto.core.utils import (
     iso_8601_datetime_without_milliseconds,
     iso_8601_datetime_with_nanoseconds,
 )
-from moto.core import BaseBackend, BaseModel
 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
 from uuid import uuid4
@@ -258,6 +262,7 @@ class Statistics:
         self.timestamp = iso_8601_datetime_without_milliseconds(dt)
         self.values = []
         self.stats = stats
+        self.unit = None

     @property
     def sample_count(self):
@@ -266,10 +271,6 @@
         return len(self.values)

-    @property
-    def unit(self):
-        return None
-
     @property
     def sum(self):
         if "Sum" not in self.stats:
@@ -325,9 +326,10 @@ class CloudWatchBackend(BaseBackend):
     # Retrieve a list of all OOTB metrics that are provided by metrics providers
     # Computed on the fly
     def aws_metric_data(self):
+        providers = CloudWatchMetricProvider.__subclasses__()
         md = []
-        for name, service in metric_providers.items():
-            md.extend(service.get_cloudwatch_metrics())
+        for provider in providers:
+            md.extend(provider.get_cloudwatch_metrics())
         return md

     def put_metric_alarm(
@@ -530,13 +532,14 @@ class CloudWatchBackend(BaseBackend):
         end_time,
         period,
         stats,
-        dimensions,
         unit=None,
+        dimensions=None,
     ):
         period_delta = timedelta(seconds=period)
-        # TODO: Also filter by unit and dimensions
         filtered_data = [
             md
-            for md in self.metric_data
+            for md in self.get_all_metrics()
             if md.namespace == namespace
             and md.name == metric_name
             and start_time <= md.timestamp <= end_time
@@ -566,6 +569,7 @@ class CloudWatchBackend(BaseBackend):
                 dt + period_delta
             ):
                 s.values.append(filtered_data[idx].value)
+                s.unit = filtered_data[idx].unit
                 idx += 1

             if not s.values:
@@ -685,8 +689,3 @@ for region in Session().get_available_regions(
     cloudwatch_backends[region] = CloudWatchBackend(region)
 for region in Session().get_available_regions("cloudwatch", partition_name="aws-cn"):
     cloudwatch_backends[region] = CloudWatchBackend(region)
-
-# List of services that provide OOTB CW metrics
-# See the S3Backend constructor for an example
-# TODO: We might have to separate this out per region for non-global services
-metric_providers = {}

View File

@@ -186,11 +186,9 @@ class CloudWatchResponse(BaseResponse):
         # Unsupported Parameters (To Be Implemented)
         unit = self._get_param("Unit")
         extended_statistics = self._get_param("ExtendedStatistics")
-        if extended_statistics:
-            raise NotImplementedError()

         # TODO: this should instead throw InvalidParameterCombination
-        if not statistics:
+        if not statistics and not extended_statistics:
             raise NotImplementedError(
                 "Must specify either Statistics or ExtendedStatistics"
             )
@@ -202,7 +200,7 @@
             end_time,
             period,
             statistics,
-            unit,
+            unit=unit,
             dimensions=dimensions,
         )
         template = self.response_template(GET_METRIC_STATISTICS_TEMPLATE)

View File

@@ -1,5 +1,5 @@
 from .models import BaseModel, BaseBackend, moto_api_backend, ACCOUNT_ID  # noqa
-from .models import CloudFormationModel  # noqa
+from .models import CloudFormationModel, CloudWatchMetricProvider  # noqa
 from .models import patch_client, patch_resource  # noqa

 from .responses import ActionAuthenticatorMixin

View File

@@ -958,4 +958,11 @@ class MotoAPIBackend(BaseBackend):
         self.__init__()


+class CloudWatchMetricProvider(object):
+    @staticmethod
+    @abstractmethod
+    def get_cloudwatch_metrics():
+        pass
+
+
 moto_api_backend = MotoAPIBackend()
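
A side note on the new base class: because CloudWatchMetricProvider inherits from plain object rather than using abc.ABCMeta, the @abstractmethod marker is advisory only; Python will neither block instantiation nor force subclasses to override get_cloudwatch_metrics. A short sketch of the difference, assuming only the standard abc module (EnforcedProvider is a hypothetical stricter variant, not part of this commit):

from abc import ABC, abstractmethod


class AdvisoryProvider(object):
    # mirrors the class above: the decorator has no runtime effect without ABCMeta
    @staticmethod
    @abstractmethod
    def get_cloudwatch_metrics():
        pass


class EnforcedProvider(ABC):
    # hypothetical stricter variant
    @staticmethod
    @abstractmethod
    def get_cloudwatch_metrics():
        pass


AdvisoryProvider()  # fine: nothing is enforced
try:
    EnforcedProvider()  # raises TypeError: abstract method not overridden
except TypeError as exc:
    print(exc)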

View File

@@ -10,14 +10,20 @@ import random
 import string
 import tempfile
 import threading
+import pytz
 import sys
 import time
 import uuid

 from bisect import insort
-import pytz
-from moto.core import ACCOUNT_ID, BaseBackend, BaseModel, CloudFormationModel
+from moto.core import (
+    ACCOUNT_ID,
+    BaseBackend,
+    BaseModel,
+    CloudFormationModel,
+    CloudWatchMetricProvider,
+)
 from moto.core.utils import (
     iso_8601_datetime_without_milliseconds_s3,
     rfc_1123_datetime,
@@ -1314,7 +1320,7 @@ class FakeBucket(CloudFormationModel):
         return now.strftime("%Y-%m-%dT%H:%M:%SZ")


-class S3Backend(BaseBackend):
+class S3Backend(BaseBackend, CloudWatchMetricProvider):
     def __init__(self):
         self.buckets = {}
         self.account_public_access_block = None
@@ -1358,9 +1364,10 @@ class S3Backend(BaseBackend):
     # Must provide a method 'get_cloudwatch_metrics' that will return a list of metrics, based on the data available
     # metric_providers["S3"] = self
-    def get_cloudwatch_metrics(self):
+    @classmethod
+    def get_cloudwatch_metrics(cls):
         metrics = []
-        for name, bucket in self.buckets.items():
+        for name, bucket in s3_backend.buckets.items():
             metrics.append(
                 MetricDatum(
                     namespace="AWS/S3",
@@ -1370,7 +1377,10 @@
                         {"Name": "StorageType", "Value": "StandardStorage"},
                         {"Name": "BucketName", "Value": name},
                     ],
-                    timestamp=datetime.datetime.now(),
+                    timestamp=datetime.datetime.now(tz=pytz.utc).replace(
+                        hour=0, minute=0, second=0, microsecond=0
+                    ),
+                    unit="Bytes",
                 )
             )
             metrics.append(
@@ -1382,7 +1392,10 @@
                         {"Name": "StorageType", "Value": "AllStorageTypes"},
                         {"Name": "BucketName", "Value": name},
                     ],
-                    timestamp=datetime.datetime.now(),
+                    timestamp=datetime.datetime.now(tz=pytz.utc).replace(
+                        hour=0, minute=0, second=0, microsecond=0
+                    ),
+                    unit="Count",
                 )
             )
         return metrics
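
Worth noting for the timestamps introduced above: they are pinned to midnight UTC, loosely mirroring how AWS reports the daily S3 storage metrics (BucketSizeBytes and NumberOfObjects are published once per day). The practical consequence for callers, sketched below with hypothetical values, is that a get_metric_statistics query window has to include that midnight boundary, which is why the tests further down ask for the last two days:

import pytz
from datetime import datetime, timedelta

now = datetime.now(tz=pytz.utc)
# what S3Backend stamps onto the MetricDatum objects above
metric_timestamp = now.replace(hour=0, minute=0, second=0, microsecond=0)

# a trailing two-day window always contains the current day's midnight UTC
start, end = now - timedelta(days=2), now
assert start <= metric_timestamp <= end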

View File

@@ -1,9 +1,10 @@
 import boto
 from boto.ec2.cloudwatch.alarm import MetricAlarm
+from boto.s3.key import Key
 from datetime import datetime
 import sure  # noqa # pylint: disable=unused-import

-from moto import mock_cloudwatch_deprecated
+from moto import mock_cloudwatch_deprecated, mock_s3_deprecated


 def alarm_fixture(name="tester", action=None):
@@ -194,34 +195,31 @@ def test_get_metric_statistics():
     datapoints.should.have.length_of(0)


-# TODO: THIS IS CURRENTLY BROKEN!
-# @mock_s3_deprecated
-# @mock_cloudwatch_deprecated
-# def test_cloudwatch_return_s3_metrics():
-#
-#     region = "us-east-1"
-#
-#     cw = boto.ec2.cloudwatch.connect_to_region(region)
-#     s3 = boto.s3.connect_to_region(region)
-#
-#     bucket_name_1 = "test-bucket-1"
-#     bucket_name_2 = "test-bucket-2"
-#
-#     bucket1 = s3.create_bucket(bucket_name=bucket_name_1)
-#     key = Key(bucket1)
-#     key.key = "the-key"
-#     key.set_contents_from_string("foobar" * 4)
-#     s3.create_bucket(bucket_name=bucket_name_2)
-#
-#     metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1})
-#     # Verify that the OOTB S3 metrics are available for the created buckets
-#     len(metrics_s3_bucket_1).should.be(2)
-#     metric_names = [m.name for m in metrics_s3_bucket_1]
-#     sorted(metric_names).should.equal(
-#         ["Metric:BucketSizeBytes", "Metric:NumberOfObjects"]
-#     )
-#
-#     # Explicit clean up - the metrics for these buckets are messing with subsequent tests
-#     key.delete()
-#     s3.delete_bucket(bucket_name_1)
-#     s3.delete_bucket(bucket_name_2)
+@mock_s3_deprecated
+@mock_cloudwatch_deprecated
+def test_cloudwatch_return_s3_metrics():
+    region = "us-east-1"
+
+    cw = boto.ec2.cloudwatch.connect_to_region(region)
+    s3 = boto.s3.connect_to_region(region)
+    bucket_name_1 = "test-bucket-1"
+    bucket_name_2 = "test-bucket-2"
+
+    bucket1 = s3.create_bucket(bucket_name=bucket_name_1)
+    key = Key(bucket1)
+    key.key = "the-key"
+    key.set_contents_from_string("foobar" * 4)
+    s3.create_bucket(bucket_name=bucket_name_2)
+
+    metrics_s3_bucket_1 = cw.list_metrics(dimensions={"BucketName": bucket_name_1})
+
+    # Verify that the OOTB S3 metrics are available for the created buckets
+    len(metrics_s3_bucket_1).should.be(2)
+    metric_names = [m.name for m in metrics_s3_bucket_1]
+    sorted(metric_names).should.equal(["BucketSizeBytes", "NumberOfObjects"])
+
+    # Delete everything, to make sure it's not picked up in later tests
+    bucket1.delete_key("the-key")
+    s3.delete_bucket("test-bucket-1")
+    s3.delete_bucket("test-bucket-2")

View File

@@ -11,7 +11,7 @@ from freezegun import freeze_time
 from operator import itemgetter
 from uuid import uuid4

-from moto import mock_cloudwatch
+from moto import mock_cloudwatch, mock_s3
 from moto.core import ACCOUNT_ID
@@ -170,6 +170,7 @@ def test_get_metric_statistics_dimensions():
             Statistics=["Average", "Sum"],
             **params[0],
         )
+        print(stats)
         stats["Datapoints"].should.have.length_of(1)
         datapoint = stats["Datapoints"][0]
         datapoint["Sum"].should.equal(params[1])
@@ -342,7 +343,7 @@ def test_list_metrics():
 def test_list_metrics_paginated():
     cloudwatch = boto3.client("cloudwatch", "eu-west-1")
     # Verify that only a single page of metrics is returned
-    cloudwatch.list_metrics()["Metrics"].should.be.empty
+    cloudwatch.list_metrics().shouldnt.have.key("NextToken")
     # Verify we can't pass a random NextToken
     with pytest.raises(ClientError) as e:
         cloudwatch.list_metrics(NextToken=str(uuid4()))
@@ -710,6 +711,95 @@ def test_get_metric_data_for_multiple_metrics():
     res2["Values"].should.equal([25.0])


+@mock_cloudwatch
+@mock_s3
+def test_cloudwatch_return_s3_metrics():
+    utc_now = datetime.now(tz=pytz.utc)
+    bucket_name = "examplebucket"
+    cloudwatch = boto3.client("cloudwatch", "eu-west-3")
+
+    # given
+    s3 = boto3.resource("s3")
+    s3_client = boto3.client("s3")
+    bucket = s3.Bucket(bucket_name)
+    bucket.create(CreateBucketConfiguration={"LocationConstraint": "eu-west-3"})
+    bucket.put_object(Body=b"ABCD", Key="file.txt")
+
+    # when
+    metrics = cloudwatch.list_metrics(
+        Dimensions=[{"Name": "BucketName", "Value": bucket_name}]
+    )["Metrics"]
+
+    # then
+    metrics.should.have.length_of(2)
+    metrics.should.contain(
+        {
+            "Namespace": "AWS/S3",
+            "MetricName": "NumberOfObjects",
+            "Dimensions": [
+                {"Name": "StorageType", "Value": "AllStorageTypes"},
+                {"Name": "BucketName", "Value": bucket_name},
+            ],
+        }
+    )
+    metrics.should.contain(
+        {
+            "Namespace": "AWS/S3",
+            "MetricName": "BucketSizeBytes",
+            "Dimensions": [
+                {"Name": "StorageType", "Value": "StandardStorage"},
+                {"Name": "BucketName", "Value": bucket_name},
+            ],
+        }
+    )
+
+    # when
+    stats = cloudwatch.get_metric_statistics(
+        Namespace="AWS/S3",
+        MetricName="BucketSizeBytes",
+        Dimensions=[
+            {"Name": "BucketName", "Value": bucket_name},
+            {"Name": "StorageType", "Value": "StandardStorage"},
+        ],
+        StartTime=utc_now - timedelta(days=2),
+        EndTime=utc_now,
+        Period=86400,
+        Statistics=["Average"],
+        Unit="Bytes",
+    )
+
+    # then
+    stats.should.have.key("Label").equal("BucketSizeBytes")
+    stats.should.have.key("Datapoints").length_of(1)
+    data_point = stats["Datapoints"][0]
+    data_point.should.have.key("Average").being.above(0)
+    data_point.should.have.key("Unit").being.equal("Bytes")
+
+    # when
+    stats = cloudwatch.get_metric_statistics(
+        Namespace="AWS/S3",
+        MetricName="NumberOfObjects",
+        Dimensions=[
+            {"Name": "BucketName", "Value": bucket_name},
+            {"Name": "StorageType", "Value": "AllStorageTypes"},
+        ],
+        StartTime=utc_now - timedelta(days=2),
+        EndTime=utc_now,
+        Period=86400,
+        Statistics=["Average"],
+    )
+
+    # then
+    stats.should.have.key("Label").equal("NumberOfObjects")
+    stats.should.have.key("Datapoints").length_of(1)
+    data_point = stats["Datapoints"][0]
+    data_point.should.have.key("Average").being.equal(1)
+    data_point.should.have.key("Unit").being.equal("Count")
+
+    s3_client.delete_object(Bucket=bucket_name, Key="file.txt")
+    s3_client.delete_bucket(Bucket=bucket_name)
+
+
 @mock_cloudwatch
 def test_put_metric_alarm():
     # given