CloudWatchLogs: create_export_task() (#4783)

This commit is contained in:
Felipe Alvarez 2022-01-25 23:01:16 +10:00 committed by GitHub
parent d53dd23390
commit 9a5d5b5d25
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 111 additions and 6 deletions

View File

@ -3354,7 +3354,7 @@
- [ ] associate_kms_key
- [ ] cancel_export_task
- [ ] create_export_task
- [X] create_export_task
- [X] create_log_group
- [X] create_log_stream
- [ ] delete_destination
@ -5448,4 +5448,4 @@
- workspaces
- workspaces-web
- xray
</details>
</details>

View File

@ -27,7 +27,7 @@ logs
- [ ] associate_kms_key
- [ ] cancel_export_task
- [ ] create_export_task
- [X] create_export_task
- [X] create_log_group
- [X] create_log_stream
- [ ] delete_destination
@ -53,7 +53,7 @@ logs
The next_token and limit arguments are ignored. The maximum
number of resource policies per region is a small number (less
than 50), so pagination isn't needed.
- [X] describe_subscription_filters
- [ ] disassociate_kms_key
@ -78,4 +78,3 @@ logs
- [X] tag_log_group
- [ ] test_metric_filter
- [X] untag_log_group

View File

@ -14,6 +14,7 @@ from moto.logs.exceptions import (
InvalidParameterException,
LimitExceededException,
)
from moto.s3 import s3_backend
from .utils import PAGINATION_MODEL
MAX_RESOURCE_POLICIES_PER_REGION = 10
@ -982,5 +983,22 @@ class LogsBackend(BaseBackend):
self.queries[query_id] = LogQuery(query_id, start_time, end_time, query_string)
return query_id
def create_export_task(
    self,
    *,
    task_name,
    log_group_name,
    log_stream_name_prefix,
    fromTime,
    to,
    destination,
    destination_prefix,
):
    """Start a (mocked) export task and return its generated task id.

    Validates that the destination S3 bucket and the source log group
    exist; the remaining arguments are accepted but not acted upon by
    this mock implementation.
    """
    # get_bucket raises when the destination bucket is missing, mirroring
    # AWS's error for an invalid export destination.
    s3_backend.get_bucket(destination)
    if log_group_name not in self.groups:
        raise ResourceNotFoundException()
    # NOTE(review): returns a uuid.UUID object; the response layer
    # stringifies it before serializing.
    return uuid.uuid4()
logs_backends = BackendDict(LogsBackend, "logs")

View File

@ -379,3 +379,23 @@ class LogsResponse(BaseResponse):
)
return json.dumps({"queryId": "{0}".format(query_id)})
def create_export_task(self):
    """Handle the CreateExportTask API call.

    Extracts the request parameters, delegates to the backend, and
    returns the generated task id as a JSON body.
    """
    task_name = self._get_param("taskName")
    # Fix: the original fetched "logGroupName" twice on consecutive lines;
    # a single lookup is sufficient.
    log_group_name = self._get_param("logGroupName")
    log_stream_name_prefix = self._get_param("logStreamNamePrefix")
    fromTime = self._get_param("from")
    to = self._get_param("to")
    destination = self._get_param("destination")
    destination_prefix = self._get_param("destinationPrefix")
    task_id = self.logs_backend.create_export_task(
        task_name=task_name,
        log_group_name=log_group_name,
        log_stream_name_prefix=log_stream_name_prefix,
        fromTime=fromTime,
        to=to,
        destination=destination,
        destination_prefix=destination_prefix,
    )
    # The backend returns a UUID object; stringify it for JSON serialization.
    return json.dumps(dict(taskId=str(task_id)))

View File

@ -4,13 +4,14 @@ import time
import sure # noqa # pylint: disable=unused-import
from unittest import SkipTest
from datetime import timedelta, datetime
from uuid import UUID
import boto3
import pytest
from botocore.exceptions import ClientError
from freezegun import freeze_time
from moto import mock_logs, settings
from moto import mock_logs, mock_s3, settings
from moto.core.utils import unix_time_millis
from moto.logs.models import MAX_RESOURCE_POLICIES_PER_REGION
@ -39,6 +40,16 @@ def json_policy_doc():
)
@pytest.fixture(scope="function")
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    fake_env = {
        "AWS_ACCESS_KEY_ID": "testing",
        "AWS_SECRET_ACCESS_KEY": "testing",
        "AWS_SECURITY_TOKEN": "testing",
        "AWS_SESSION_TOKEN": "testing",
        "AWS_DEFAULT_REGION": "us-east-1",
    }
    os.environ.update(fake_env)
@mock_logs
def test_describe_metric_filters_happy_prefix():
conn = boto3.client("logs", "us-west-2")
@ -1419,3 +1430,60 @@ def test_describe_log_streams_no_prefix():
err["Message"].should.equal(
"Cannot order by LastEventTime with a logStreamNamePrefix."
)
@mock_s3
@mock_logs
def test_create_export_task_happy_path(aws_credentials):
    """create_export_task succeeds when both the bucket and log group exist."""
    group = "/aws/codebuild/blah1"
    bucket = "mybucket"
    logs = boto3.client("logs", region_name="ap-southeast-1")
    s3 = boto3.client("s3")
    logs.create_log_group(logGroupName=group)
    s3.create_bucket(Bucket=bucket)
    resp = logs.create_export_task(
        logGroupName=group, fromTime=1611316574, to=1642852574, destination=bucket
    )
    # The returned taskId must parse as a valid UUID string.
    assert UUID(resp["taskId"])
    assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
@mock_logs
def test_create_export_task_raises_ClientError_when_bucket_not_found(aws_credentials):
    """create_export_task fails when the destination bucket does not exist."""
    group = "/aws/codebuild/blah1"
    missing_bucket = "368a7022dea3dd621"
    client = boto3.client("logs", region_name="ap-southeast-1")
    client.create_log_group(logGroupName=group)
    with pytest.raises(ClientError):
        client.create_export_task(
            logGroupName=group,
            fromTime=1611316574,
            to=1642852574,
            destination=missing_bucket,
        )
@mock_s3
@mock_logs
def test_create_export_raises_ResourceNotFoundException_log_group_not_found(
    aws_credentials,
):
    """create_export_task fails when the source log group does not exist."""
    bucket = "mybucket"
    boto3.client("s3").create_bucket(Bucket=bucket)
    client = boto3.client("logs", region_name="ap-southeast-1")
    with pytest.raises(client.exceptions.ResourceNotFoundException):
        client.create_export_task(
            logGroupName="/aws/codebuild/blah1",
            fromTime=1611316574,
            to=1642852574,
            destination=bucket,
        )