moto/tests/test_logs/test_logs.py

import json
import time
import sure # noqa # pylint: disable=unused-import
from datetime import timedelta, datetime
from uuid import UUID
import boto3
import pytest
from botocore.exceptions import ClientError
from freezegun import freeze_time
from moto import mock_logs, mock_s3, settings
from moto.core.utils import unix_time_millis
from moto.logs.models import MAX_RESOURCE_POLICIES_PER_REGION
TEST_REGION = "us-east-1" if settings.TEST_SERVER_MODE else "us-west-2"
"""Returns a policy document in JSON format.
The ARN is bogus, but that shouldn't matter for the test.
"""
json_policy_doc = json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Route53LogsToCloudWatchLogs",
"Effect": "Allow",
"Principal": {"Service": ["route53.amazonaws.com"]},
"Action": "logs:PutLogEvents",
"Resource": "log_arn",
}
],
}
)
@mock_logs
def test_describe_metric_filters_happy_prefix():
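    """Filters whose names share the given prefix are all returned by describe_metric_filters."""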
conn = boto3.client("logs", "us-west-2")
response1 = put_metric_filter(conn, count=1)
assert response1["ResponseMetadata"]["HTTPStatusCode"] == 200
response2 = put_metric_filter(conn, count=2)
assert response2["ResponseMetadata"]["HTTPStatusCode"] == 200
response = conn.describe_metric_filters(filterNamePrefix="filter")
assert len(response["metricFilters"]) == 2
assert response["metricFilters"][0]["filterName"] == "filterName1"
assert response["metricFilters"][1]["filterName"] == "filterName2"
@mock_logs
def test_describe_metric_filters_happy_log_group_name():
conn = boto3.client("logs", "us-west-2")
response1 = put_metric_filter(conn, count=1)
assert response1["ResponseMetadata"]["HTTPStatusCode"] == 200
response2 = put_metric_filter(conn, count=2)
assert response2["ResponseMetadata"]["HTTPStatusCode"] == 200
response = conn.describe_metric_filters(logGroupName="logGroupName2")
assert len(response["metricFilters"]) == 1
assert response["metricFilters"][0]["logGroupName"] == "logGroupName2"
@mock_logs
def test_describe_metric_filters_happy_metric_name():
conn = boto3.client("logs", "us-west-2")
response1 = put_metric_filter(conn, count=1)
assert response1["ResponseMetadata"]["HTTPStatusCode"] == 200
response2 = put_metric_filter(conn, count=2)
assert response2["ResponseMetadata"]["HTTPStatusCode"] == 200
response = conn.describe_metric_filters(
metricName="metricName1", metricNamespace="metricNamespace1"
)
assert len(response["metricFilters"]) == 1
metrics = response["metricFilters"][0]["metricTransformations"]
assert metrics[0]["metricName"] == "metricName1"
assert metrics[0]["metricNamespace"] == "metricNamespace1"
@mock_logs
def test_put_metric_filters_validation():
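    """put_metric_filter rejects over-long names/patterns and more than one metric transformation."""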
conn = boto3.client("logs", "us-west-2")
invalid_filter_name = "X" * 513
invalid_filter_pattern = "X" * 1025
invalid_metric_transformations = [
{
"defaultValue": 1,
"metricName": "metricName",
"metricNamespace": "metricNamespace",
"metricValue": "metricValue",
},
{
"defaultValue": 1,
"metricName": "metricName",
"metricNamespace": "metricNamespace",
"metricValue": "metricValue",
},
]
test_cases = [
build_put_case(name="Invalid filter name", filter_name=invalid_filter_name),
build_put_case(
name="Invalid filter pattern", filter_pattern=invalid_filter_pattern
),
build_put_case(
name="Invalid filter metric transformations",
metric_transformations=invalid_metric_transformations,
),
]
for test_case in test_cases:
with pytest.raises(ClientError) as exc:
conn.put_metric_filter(**test_case["input"])
response = exc.value.response
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
response["Error"]["Code"].should.equal("InvalidParameterException")
@mock_logs
def test_describe_metric_filters_validation():
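    """describe_metric_filters rejects parameter values that exceed their maximum length."""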
conn = boto3.client("logs", "us-west-2")
length_over_512 = "X" * 513
length_over_255 = "X" * 256
test_cases = [
build_describe_case(
name="Invalid filter name prefix", filter_name_prefix=length_over_512
),
build_describe_case(
name="Invalid log group name", log_group_name=length_over_512
),
build_describe_case(name="Invalid metric name", metric_name=length_over_255),
build_describe_case(
name="Invalid metric namespace", metric_namespace=length_over_255
),
]
for test_case in test_cases:
with pytest.raises(ClientError) as exc:
conn.describe_metric_filters(**test_case["input"])
response = exc.value.response
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
response["Error"]["Code"].should.equal("InvalidParameterException")
@mock_logs
def test_describe_metric_filters_multiple_happy():
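    """Filters can be looked up by name prefix, by log group name, or by both together."""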
conn = boto3.client("logs", "us-west-2")
response = put_metric_filter(conn, 1)
assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
response = put_metric_filter(conn, 2)
assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
response = conn.describe_metric_filters(
filterNamePrefix="filter", logGroupName="logGroupName1"
)
assert response["metricFilters"][0]["filterName"] == "filterName1"
response = conn.describe_metric_filters(filterNamePrefix="filter")
assert response["metricFilters"][0]["filterName"] == "filterName1"
response = conn.describe_metric_filters(logGroupName="logGroupName1")
assert response["metricFilters"][0]["filterName"] == "filterName1"
@mock_logs
def test_delete_metric_filter():
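    """A deleted metric filter no longer shows up in describe_metric_filters."""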
client = boto3.client("logs", "us-west-2")
lg_name = "/hello-world/my-cool-endpoint"
client.create_log_group(logGroupName=lg_name)
client.put_metric_filter(
logGroupName=lg_name,
filterName="my-cool-filter",
filterPattern="{ $.val = * }",
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
    response = client.delete_metric_filter(
        filterName="my-cool-filter", logGroupName=lg_name
    )
    assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
    response = client.describe_metric_filters(
        filterNamePrefix="my-cool", logGroupName=lg_name
    )
response.should.have.key("metricFilters").equals([])
@mock_logs
@pytest.mark.parametrize(
"filter_name, failing_constraint",
[
(
"X" * 513,
"Minimum length of 1. Maximum length of 512.",
), # filterName too long
("x:x", "Must match pattern"), # invalid filterName pattern
],
)
def test_delete_metric_filter_invalid_filter_name(filter_name, failing_constraint):
conn = boto3.client("logs", "us-west-2")
with pytest.raises(ClientError) as exc:
conn.delete_metric_filter(filterName=filter_name, logGroupName="valid")
response = exc.value.response
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
response["Error"]["Code"].should.equal("InvalidParameterException")
response["Error"]["Message"].should.contain(
f"Value '{filter_name}' at 'filterName' failed to satisfy constraint"
)
response["Error"]["Message"].should.contain(failing_constraint)
@mock_logs
@pytest.mark.parametrize(
"log_group_name, failing_constraint",
[
(
"X" * 513,
"Minimum length of 1. Maximum length of 512.",
), # logGroupName too long
("x!x", "Must match pattern"), # invalid logGroupName pattern
],
)
def test_delete_metric_filter_invalid_log_group_name(
log_group_name, failing_constraint
):
conn = boto3.client("logs", "us-west-2")
with pytest.raises(ClientError) as exc:
conn.delete_metric_filter(filterName="valid", logGroupName=log_group_name)
response = exc.value.response
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
response["Error"]["Code"].should.equal("InvalidParameterException")
response["Error"]["Message"].should.contain(
f"Value '{log_group_name}' at 'logGroupName' failed to satisfy constraint"
)
response["Error"]["Message"].should.contain(failing_constraint)
def put_metric_filter(conn, count=1):
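    """Helper: call put_metric_filter with every parameter value suffixed by the given count."""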
count = str(count)
return conn.put_metric_filter(
filterName="filterName" + count,
filterPattern="filterPattern" + count,
logGroupName="logGroupName" + count,
metricTransformations=[
{
"defaultValue": int(count),
"metricName": "metricName" + count,
"metricNamespace": "metricNamespace" + count,
"metricValue": "metricValue" + count,
},
],
)
def build_put_case(
name,
filter_name="filterName",
filter_pattern="filterPattern",
log_group_name="logGroupName",
metric_transformations=None,
):
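    """Helper: build a named put_metric_filter test case, overriding only the supplied parameters."""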
return {
"name": name,
"input": build_put_input(
filter_name, filter_pattern, log_group_name, metric_transformations
),
}
def build_put_input(
filter_name, filter_pattern, log_group_name, metric_transformations
):
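    """Helper: build keyword arguments for put_metric_filter, with a single default metric transformation."""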
if metric_transformations is None:
metric_transformations = [
{
"defaultValue": 1,
"metricName": "metricName",
"metricNamespace": "metricNamespace",
"metricValue": "metricValue",
},
]
return {
"filterName": filter_name,
"filterPattern": filter_pattern,
"logGroupName": log_group_name,
"metricTransformations": metric_transformations,
}
def build_describe_input(
filter_name_prefix, log_group_name, metric_name, metric_namespace
):
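    """Helper: build keyword arguments for describe_metric_filters."""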
return {
"filterNamePrefix": filter_name_prefix,
"logGroupName": log_group_name,
"metricName": metric_name,
"metricNamespace": metric_namespace,
}
def build_describe_case(
name,
filter_name_prefix="filterNamePrefix",
log_group_name="logGroupName",
metric_name="metricName",
metric_namespace="metricNamespace",
):
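    """Helper: build a named describe_metric_filters test case, overriding only the supplied parameters."""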
return {
"name": name,
"input": build_describe_input(
filter_name_prefix, log_group_name, metric_name, metric_namespace
),
}
@mock_logs
@pytest.mark.parametrize(
"kms_key_id",
[
"arn:aws:kms:us-east-1:000000000000:key/51d81fab-b138-4bd2-8a09-07fd6d37224d",
None,
],
)
def test_create_log_group(kms_key_id):
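    """Log groups can be created with or without a KMS key; describe_log_groups returns the key when one was set."""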
# Given
conn = boto3.client("logs", TEST_REGION)
create_logs_params = dict(logGroupName="dummy")
if kms_key_id:
create_logs_params["kmsKeyId"] = kms_key_id
# When
response = conn.create_log_group(**create_logs_params)
response = conn.describe_log_groups()
# Then
response["logGroups"].should.have.length_of(1)
log_group = response["logGroups"][0]
log_group.should_not.have.key("retentionInDays")
if kms_key_id:
log_group.should.have.key("kmsKeyId")
log_group["kmsKeyId"].should.equal(kms_key_id)
@mock_logs
def test_exceptions():
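    """Duplicate log groups/streams and writes to a missing stream raise the expected ClientErrors."""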
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
    log_stream_name = "dummy-stream"
conn.create_log_group(logGroupName=log_group_name)
with pytest.raises(ClientError):
conn.create_log_group(logGroupName=log_group_name)
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
with pytest.raises(ClientError):
conn.create_log_stream(
logGroupName=log_group_name, logStreamName=log_stream_name
)
conn.put_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
logEvents=[{"timestamp": 0, "message": "line"}],
)
with pytest.raises(ClientError) as ex:
conn.put_log_events(
logGroupName=log_group_name,
logStreamName="invalid-stream",
logEvents=[{"timestamp": 0, "message": "line"}],
)
error = ex.value.response["Error"]
error["Code"].should.equal("ResourceNotFoundException")
error["Message"].should.equal("The specified log stream does not exist.")
@mock_logs
def test_put_logs():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
log_stream_name = "stream"
conn.create_log_group(logGroupName=log_group_name)
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
messages = [
{"timestamp": int(unix_time_millis()), "message": "hello"},
{"timestamp": int(unix_time_millis()), "message": "world"},
]
put_results = conn.put_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=messages
)
res = conn.get_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name
)
events = res["events"]
next_sequence_token = put_results["nextSequenceToken"]
assert isinstance(next_sequence_token, str)
assert len(next_sequence_token) == 56
events.should.have.length_of(2)
@mock_logs
def test_put_log_events_in_wrong_order():
conn = boto3.client("logs", "us-east-1")
log_group_name = "test"
log_stream_name = "teststream"
conn.create_log_group(logGroupName=log_group_name)
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
ts_1 = int(unix_time_millis(datetime.utcnow() - timedelta(days=2)))
ts_2 = int(unix_time_millis(datetime.utcnow() - timedelta(days=5)))
messages = [
{"message": f"Message {idx}", "timestamp": ts}
for idx, ts in enumerate([ts_1, ts_2])
]
with pytest.raises(ClientError) as exc:
conn.put_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
logEvents=messages,
sequenceToken="49599396607703531511419593985621160512859251095480828066",
)
err = exc.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.equal(
"Log events in a single PutLogEvents request must be in chronological order."
)
@mock_logs
@pytest.mark.parametrize("days_ago", [15, 400])
def test_put_log_events_in_the_past(days_ago):
conn = boto3.client("logs", "us-east-1")
log_group_name = "test"
log_stream_name = "teststream"
conn.create_log_group(logGroupName=log_group_name)
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
timestamp = int(unix_time_millis(datetime.utcnow() - timedelta(days=days_ago)))
messages = [{"message": "Message number {}", "timestamp": timestamp}]
resp = conn.put_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=messages
)
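# Events this far in the past are rejected and reported via rejectedLogEventsInfo.tooOldLogEventEndIndex.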
resp.should.have.key("rejectedLogEventsInfo").should.equal(
{"tooOldLogEventEndIndex": 0}
)
@mock_logs
@pytest.mark.parametrize("minutes", [181, 300, 999999])
def test_put_log_events_in_the_future(minutes):
conn = boto3.client("logs", "us-east-1")
log_group_name = "test"
log_stream_name = "teststream"
conn.create_log_group(logGroupName=log_group_name)
conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
timestamp = int(unix_time_millis(datetime.utcnow() + timedelta(minutes=minutes)))
messages = [{"message": "Message number {}", "timestamp": timestamp}]
resp = conn.put_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=messages
)
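# Events with timestamps too far in the future are rejected and reported via rejectedLogEventsInfo.tooNewLogEventStartIndex.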
resp.should.have.key("rejectedLogEventsInfo").should.equal(
{"tooNewLogEventStartIndex": 0}
)
@mock_logs
def test_put_retention_policy():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
response = conn.create_log_group(logGroupName=log_group_name)
response = conn.put_retention_policy(logGroupName=log_group_name, retentionInDays=7)
response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
assert len(response["logGroups"]) == 1
assert response["logGroups"][0].get("retentionInDays") == 7
response = conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_delete_log_stream():
logs = boto3.client("logs", TEST_REGION)
logs.create_log_group(logGroupName="logGroup")
logs.create_log_stream(logGroupName="logGroup", logStreamName="logStream")
resp = logs.describe_log_streams(logGroupName="logGroup")
assert resp["logStreams"][0]["logStreamName"] == "logStream"
logs.delete_log_stream(logGroupName="logGroup", logStreamName="logStream")
resp = logs.describe_log_streams(logGroupName="logGroup")
assert resp["logStreams"] == []
@mock_logs
def test_delete_retention_policy():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
response = conn.create_log_group(logGroupName=log_group_name)
response = conn.put_retention_policy(logGroupName=log_group_name, retentionInDays=7)
response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
assert len(response["logGroups"]) == 1
assert response["logGroups"][0].get("retentionInDays") == 7
response = conn.delete_retention_policy(logGroupName=log_group_name)
response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
assert len(response["logGroups"]) == 1
assert response["logGroups"][0].get("retentionInDays") is None
conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_put_resource_policy():
client = boto3.client("logs", TEST_REGION)
# For this test a policy document with a valid ARN will be used.
log_group_name = "test_log_group"
client.create_log_group(logGroupName=log_group_name)
log_group_info = client.describe_log_groups(logGroupNamePrefix=log_group_name)
policy_name = "test_policy"
policy_doc = json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Route53LogsToCloudWatchLogs",
"Effect": "Allow",
"Principal": {"Service": ["route53.amazonaws.com"]},
"Action": "logs:PutLogEvents",
"Resource": log_group_info["logGroups"][0]["arn"],
}
],
}
)
response = client.put_resource_policy(
policyName=policy_name, policyDocument=policy_doc
)
assert response["resourcePolicy"]["policyName"] == policy_name
assert response["resourcePolicy"]["policyDocument"] == policy_doc
assert response["resourcePolicy"]["lastUpdatedTime"] <= int(unix_time_millis())
client.delete_log_group(logGroupName=log_group_name)
# put_resource_policy with the same policy name should update the existing resource policy
created_time = response["resourcePolicy"]["lastUpdatedTime"]
with freeze_time(timedelta(minutes=1)):
new_document = '{"Statement":[{"Action":"logs:*","Effect":"Allow","Principal":"*","Resource":"*"}]}'
policy_info = client.put_resource_policy(
policyName=policy_name, policyDocument=new_document
)["resourcePolicy"]
assert policy_info["policyName"] == policy_name
assert policy_info["policyDocument"] == new_document
assert created_time < policy_info["lastUpdatedTime"] <= int(unix_time_millis())
@mock_logs
def test_put_resource_policy_too_many():
client = boto3.client("logs", TEST_REGION)
# Create the maximum number of resource policies.
for idx in range(MAX_RESOURCE_POLICIES_PER_REGION):
policy_name = f"test_policy_{idx}"
client.put_resource_policy(
policyName=policy_name, policyDocument=json.dumps(json_policy_doc)
)
# Now create one more policy, which should generate an error.
with pytest.raises(ClientError) as exc:
client.put_resource_policy(
policyName="too_many", policyDocument=json.dumps(json_policy_doc)
)
exc_value = exc.value
exc_value.operation_name.should.equal("PutResourcePolicy")
exc_value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
exc_value.response["Error"]["Code"].should.equal("LimitExceededException")
exc_value.response["Error"]["Message"].should.contain("Resource limit exceeded.")
# put_resource_policy on an already-created policy should not raise an error
client.put_resource_policy(
policyName="test_policy_1", policyDocument=json.dumps(json_policy_doc)
)
@mock_logs
def test_delete_resource_policy():
client = boto3.client("logs", TEST_REGION)
# Create a bunch of resource policies so we can give delete a workout.
base_policy_name = "test_policy"
for idx in range(MAX_RESOURCE_POLICIES_PER_REGION):
client.put_resource_policy(
policyName=f"{base_policy_name}_{idx}", policyDocument=json_policy_doc
)
# Verify that all those resource policies can be deleted.
for idx in range(MAX_RESOURCE_POLICIES_PER_REGION):
client.delete_resource_policy(policyName=f"{base_policy_name}_{idx}")
# Verify there are no resource policies.
response = client.describe_resource_policies()
policies = response["resourcePolicies"]
assert not policies
# Try deleting a non-existent resource policy.
with pytest.raises(ClientError) as exc:
client.delete_resource_policy(policyName="non-existent")
exc_value = exc.value
exc_value.operation_name.should.equal("DeleteResourcePolicy")
exc_value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
exc_value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
exc_value.response["Error"]["Message"].should.contain(
"Policy with name [non-existent] does not exist"
)
@mock_logs
def test_describe_resource_policies():
client = boto3.client("logs", TEST_REGION)
# Create the maximum number of resource policies so there's something
# to retrieve.
for idx in range(MAX_RESOURCE_POLICIES_PER_REGION):
policy_name = f"test_policy_{idx}"
client.put_resource_policy(
policyName=policy_name, policyDocument=json_policy_doc
)
# Retrieve all of the resource policies that were just created.
response = client.describe_resource_policies(limit=50)
assert "resourcePolicies" in response
policies = response["resourcePolicies"]
assert len(policies) == MAX_RESOURCE_POLICIES_PER_REGION
# Verify the retrieved list is valid.
now_millis = int(unix_time_millis())
for idx, policy in enumerate(policies):
assert policy["policyName"] == f"test_policy_{idx}"
assert policy["policyDocument"] == json_policy_doc
assert policy["lastUpdatedTime"] <= now_millis
@mock_logs
def test_get_log_events():
client = boto3.client("logs", TEST_REGION)
log_group_name = "test"
log_stream_name = "stream"
client.create_log_group(logGroupName=log_group_name)
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
data = [
(int(unix_time_millis(datetime.utcnow() + timedelta(milliseconds=x))), str(x))
for x in range(20)
]
events = [{"timestamp": x, "message": y} for x, y in data]
client.put_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=events
)
resp = client.get_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, limit=10
)
resp["events"].should.have.length_of(10)
for idx, (x, y) in enumerate(data[10:]):
resp["events"][idx]["timestamp"].should.equal(x)
resp["events"][idx]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000019"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000010"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextBackwardToken"],
limit=20,
)
resp["events"].should.have.length_of(10)
for idx, (x, y) in enumerate(data[0:10]):
resp["events"][idx]["timestamp"].should.equal(x)
resp["events"][idx]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000009"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000000"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextBackwardToken"],
limit=10,
)
resp["events"].should.have.length_of(0)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000000"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000000"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextForwardToken"],
limit=1,
)
resp["events"].should.have.length_of(1)
x, y = data[1]
resp["events"][0]["timestamp"].should.equal(x)
resp["events"][0]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000001"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000001"
)
@mock_logs
def test_get_log_events_with_start_from_head():
client = boto3.client("logs", TEST_REGION)
log_group_name = "test"
log_stream_name = "stream"
client.create_log_group(logGroupName=log_group_name)
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
data = [
(int(unix_time_millis(datetime.utcnow() + timedelta(milliseconds=x))), str(x))
for x in range(20)
]
events = [{"timestamp": x, "message": y} for x, y in data]
client.put_log_events(
logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=events
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
limit=10,
startFromHead=True,  # this parameter only has an effect when nextToken is not supplied
)
resp["events"].should.have.length_of(10)
for idx, (x, y) in enumerate(data[0:10]):
resp["events"][idx]["timestamp"].should.equal(x)
resp["events"][idx]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000009"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000000"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextForwardToken"],
limit=20,
)
resp["events"].should.have.length_of(10)
for idx, (x, y) in enumerate(data[10:]):
resp["events"][idx]["timestamp"].should.equal(x)
resp["events"][idx]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000019"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000010"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextForwardToken"],
limit=10,
)
resp["events"].should.have.length_of(0)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000019"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000019"
)
resp = client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken=resp["nextBackwardToken"],
limit=1,
)
resp["events"].should.have.length_of(1)
x, y = data[18]
resp["events"][0]["timestamp"].should.equal(x)
resp["events"][0]["message"].should.equal(y)
resp["nextForwardToken"].should.equal(
"f/00000000000000000000000000000000000000000000000000000018"
)
resp["nextBackwardToken"].should.equal(
"b/00000000000000000000000000000000000000000000000000000018"
)
@mock_logs
def test_get_log_events_errors():
client = boto3.client("logs", TEST_REGION)
log_group_name = "test"
log_stream_name = "stream"
client.create_log_group(logGroupName=log_group_name)
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
with pytest.raises(ClientError) as exc:
client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken="n/00000000000000000000000000000000000000000000000000000000",
)
exc_value = exc.value
exc_value.operation_name.should.equal("GetLogEvents")
exc_value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
exc_value.response["Error"]["Code"].should.equal("InvalidParameterException")
exc_value.response["Error"]["Message"].should.contain(
"The specified nextToken is invalid."
)
with pytest.raises(ClientError) as exc:
client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
nextToken="not-existing-token",
)
exc_value = exc.value
exc_value.operation_name.should.equal("GetLogEvents")
exc_value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
exc_value.response["Error"]["Code"].should.equal("InvalidParameterException")
exc_value.response["Error"]["Message"].should.contain(
"The specified nextToken is invalid."
)
@mock_logs
def test_list_tags_log_group():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
tags = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.create_log_group(logGroupName=log_group_name)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == {}
response = conn.delete_log_group(logGroupName=log_group_name)
response = conn.create_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
response = conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_tag_log_group():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
tags = {"tag_key_1": "tag_value_1"}
response = conn.create_log_group(logGroupName=log_group_name)
response = conn.tag_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
tags_with_added_value = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(
logGroupName=log_group_name, tags={"tag_key_2": "tag_value_2"}
)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags_with_added_value
tags_with_updated_value = {"tag_key_1": "tag_value_XX", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(
logGroupName=log_group_name, tags={"tag_key_1": "tag_value_XX"}
)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags_with_updated_value
response = conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_untag_log_group():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
response = conn.create_log_group(logGroupName=log_group_name)
tags = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
tags_to_remove = ["tag_key_1"]
remaining_tags = {"tag_key_2": "tag_value_2"}
response = conn.untag_log_group(logGroupName=log_group_name, tags=tags_to_remove)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == remaining_tags
response = conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_describe_subscription_filters():
# given
client = boto3.client("logs", "us-east-1")
log_group_name = "/test"
client.create_log_group(logGroupName=log_group_name)
# when
response = client.describe_subscription_filters(logGroupName=log_group_name)
# then
response["subscriptionFilters"].should.have.length_of(0)
@mock_logs
def test_describe_subscription_filters_errors():
# given
client = boto3.client("logs", "us-east-1")
# when
with pytest.raises(ClientError) as exc:
client.describe_subscription_filters(logGroupName="not-existing-log-group")
# then
exc_value = exc.value
exc_value.operation_name.should.equal("DescribeSubscriptionFilters")
exc_value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
exc_value.response["Error"]["Code"].should.contain("ResourceNotFoundException")
exc_value.response["Error"]["Message"].should.equal(
"The specified log group does not exist"
)
@mock_logs
def test_describe_log_groups_paging():
client = boto3.client("logs", "us-east-1")
group_names = [
"/aws/lambda/lowercase-dev",
"/aws/lambda/FileMonitoring",
"/aws/events/GetMetricData",
"/aws/lambda/fileAvailable",
]
for name in group_names:
client.create_log_group(logGroupName=name)
resp = client.describe_log_groups()
resp["logGroups"].should.have.length_of(4)
resp.should_not.have.key("nextToken")
resp = client.describe_log_groups(limit=2)
resp["logGroups"].should.have.length_of(2)
resp.should.have.key("nextToken")
resp = client.describe_log_groups(nextToken=resp["nextToken"], limit=1)
resp["logGroups"].should.have.length_of(1)
resp.should.have.key("nextToken")
resp = client.describe_log_groups(nextToken=resp["nextToken"])
resp["logGroups"].should.have.length_of(1)
resp["logGroups"][0]["logGroupName"].should.equal("/aws/lambda/lowercase-dev")
resp.should_not.have.key("nextToken")
resp = client.describe_log_groups(nextToken="invalid-token")
resp["logGroups"].should.have.length_of(0)
resp.should_not.have.key("nextToken")
@mock_logs
def test_describe_log_streams_simple_paging():
client = boto3.client("logs", "us-east-1")
group_name = "/aws/lambda/lowercase-dev"
client.create_log_group(logGroupName=group_name)
stream_names = ["stream" + str(i) for i in range(0, 10)]
for name in stream_names:
client.create_log_stream(logGroupName=group_name, logStreamName=name)
# Get stream 1-10
resp = client.describe_log_streams(logGroupName=group_name)
resp["logStreams"].should.have.length_of(10)
resp.should_not.have.key("nextToken")
# Get stream 1-4
resp = client.describe_log_streams(logGroupName=group_name, limit=4)
resp["logStreams"].should.have.length_of(4)
[stream["logStreamName"] for stream in resp["logStreams"]].should.equal(
["stream0", "stream1", "stream2", "stream3"]
)
resp.should.have.key("nextToken")
# Get stream 4-8
resp = client.describe_log_streams(
logGroupName=group_name, limit=4, nextToken=str(resp["nextToken"])
)
resp["logStreams"].should.have.length_of(4)
[stream["logStreamName"] for stream in resp["logStreams"]].should.equal(
["stream4", "stream5", "stream6", "stream7"]
)
resp.should.have.key("nextToken")
# Get stream 8-10
resp = client.describe_log_streams(
logGroupName=group_name, limit=4, nextToken=str(resp["nextToken"])
)
resp["logStreams"].should.have.length_of(2)
[stream["logStreamName"] for stream in resp["logStreams"]].should.equal(
["stream8", "stream9"]
)
resp.should_not.have.key("nextToken")
@mock_logs
def test_describe_log_streams_paging():
client = boto3.client("logs", "us-east-1")
log_group_name = "/aws/codebuild/lowercase-dev"
stream_names = [
"job/214/stage/unit_tests/foo",
"job/215/stage/unit_tests/spam",
"job/215/stage/e2e_tests/eggs",
"job/216/stage/unit_tests/eggs",
]
client.create_log_group(logGroupName=log_group_name)
for name in stream_names:
client.create_log_stream(logGroupName=log_group_name, logStreamName=name)
resp = client.describe_log_streams(logGroupName=log_group_name)
resp["logStreams"].should.have.length_of(4)
resp["logStreams"][0]["arn"].should.contain(log_group_name)
resp.should_not.have.key("nextToken")
resp = client.describe_log_streams(logGroupName=log_group_name, limit=2)
resp["logStreams"].should.have.length_of(2)
resp["logStreams"][0]["arn"].should.contain(log_group_name)
resp["nextToken"].should.equal(
f"{log_group_name}@{resp['logStreams'][1]['logStreamName']}"
)
resp = client.describe_log_streams(
logGroupName=log_group_name, nextToken=resp["nextToken"], limit=1
)
resp["logStreams"].should.have.length_of(1)
resp["logStreams"][0]["arn"].should.contain(log_group_name)
resp["nextToken"].should.equal(
f"{log_group_name}@{resp['logStreams'][0]['logStreamName']}"
)
resp = client.describe_log_streams(
logGroupName=log_group_name, nextToken=resp["nextToken"]
)
resp["logStreams"].should.have.length_of(1)
resp["logStreams"][0]["arn"].should.contain(log_group_name)
resp.should_not.have.key("nextToken")
resp = client.describe_log_streams(
logGroupName=log_group_name, nextToken="invalid-token"
)
resp["logStreams"].should.have.length_of(0)
resp.should_not.have.key("nextToken")
resp = client.describe_log_streams(
logGroupName=log_group_name, nextToken="invalid@token"
)
resp["logStreams"].should.have.length_of(0)
resp.should_not.have.key("nextToken")
@mock_logs
def test_start_query():
client = boto3.client("logs", "us-east-1")
log_group_name = "/aws/codebuild/lowercase-dev"
client.create_log_group(logGroupName=log_group_name)
response = client.start_query(
logGroupName=log_group_name,
startTime=int(time.time()),
endTime=int(time.time()) + 300,
queryString="test",
)
assert "queryId" in response
with pytest.raises(ClientError) as exc:
client.start_query(
logGroupName="/aws/codebuild/lowercase-dev-invalid",
startTime=int(time.time()),
endTime=int(time.time()) + 300,
queryString="test",
)
# then
exc_value = exc.value
exc_value.response["Error"]["Code"].should.contain("ResourceNotFoundException")
exc_value.response["Error"]["Message"].should.equal(
"The specified log group does not exist"
)
@pytest.mark.parametrize("nr_of_events", [10001, 1000000])
@mock_logs
def test_get_too_many_log_events(nr_of_events):
client = boto3.client("logs", "us-east-1")
log_group_name = "dummy"
log_stream_name = "stream"
client.create_log_group(logGroupName=log_group_name)
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
with pytest.raises(ClientError) as ex:
client.get_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
limit=nr_of_events,
)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{nr_of_events}' at 'limit' failed to satisfy constraint"
)
err["Message"].should.contain("Member must have value less than or equal to 10000")
@pytest.mark.parametrize("nr_of_events", [10001, 1000000])
@mock_logs
def test_filter_too_many_log_events(nr_of_events):
client = boto3.client("logs", "us-east-1")
log_group_name = "dummy"
log_stream_name = "stream"
client.create_log_group(logGroupName=log_group_name)
client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
with pytest.raises(ClientError) as ex:
client.filter_log_events(
logGroupName=log_group_name,
logStreamNames=[log_stream_name],
limit=nr_of_events,
)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{nr_of_events}' at 'limit' failed to satisfy constraint"
)
err["Message"].should.contain("Member must have value less than or equal to 10000")
@pytest.mark.parametrize("nr_of_groups", [51, 100])
@mock_logs
def test_describe_too_many_log_groups(nr_of_groups):
client = boto3.client("logs", "us-east-1")
with pytest.raises(ClientError) as ex:
client.describe_log_groups(limit=nr_of_groups)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{nr_of_groups}' at 'limit' failed to satisfy constraint"
)
err["Message"].should.contain("Member must have value less than or equal to 50")
@pytest.mark.parametrize("nr_of_streams", [51, 100])
@mock_logs
def test_describe_too_many_log_streams(nr_of_streams):
client = boto3.client("logs", "us-east-1")
log_group_name = "dummy"
client.create_log_group(logGroupName=log_group_name)
with pytest.raises(ClientError) as ex:
client.describe_log_streams(logGroupName=log_group_name, limit=nr_of_streams)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{nr_of_streams}' at 'limit' failed to satisfy constraint"
)
err["Message"].should.contain("Member must have value less than or equal to 50")
@pytest.mark.parametrize("length", [513, 1000])
@mock_logs
def test_create_log_group_invalid_name_length(length):
log_group_name = "a" * length
client = boto3.client("logs", "us-east-1")
with pytest.raises(ClientError) as ex:
client.create_log_group(logGroupName=log_group_name)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{log_group_name}' at 'logGroupName' failed to satisfy constraint"
)
err["Message"].should.contain("Member must have length less than or equal to 512")
@pytest.mark.parametrize("invalid_orderby", ["", "sth", "LogStreamname"])
@mock_logs
def test_describe_log_streams_invalid_order_by(invalid_orderby):
client = boto3.client("logs", "us-east-1")
log_group_name = "dummy"
client.create_log_group(logGroupName=log_group_name)
with pytest.raises(ClientError) as ex:
client.describe_log_streams(
logGroupName=log_group_name, orderBy=invalid_orderby
)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.contain("1 validation error detected")
err["Message"].should.contain(
f"Value '{invalid_orderby}' at 'orderBy' failed to satisfy constraint"
)
err["Message"].should.contain(
"Member must satisfy enum value set: [LogStreamName, LastEventTime]"
)
@mock_logs
def test_describe_log_streams_no_prefix():
"""
From the docs: If orderBy is LastEventTime, you cannot specify logStreamNamePrefix.
"""
client = boto3.client("logs", "us-east-1")
log_group_name = "dummy"
client.create_log_group(logGroupName=log_group_name)
with pytest.raises(ClientError) as ex:
client.describe_log_streams(
logGroupName=log_group_name,
orderBy="LastEventTime",
logStreamNamePrefix="sth",
)
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidParameterException")
err["Message"].should.equal(
"Cannot order by LastEventTime with a logStreamNamePrefix."
)
@mock_s3
@mock_logs
def test_create_export_task_happy_path():
log_group_name = "/aws/codebuild/blah1"
destination = "mybucket"
fromTime = 1611316574
to = 1642852574
logs = boto3.client("logs", region_name="ap-southeast-1")
s3 = boto3.client("s3")
logs.create_log_group(logGroupName=log_group_name)
s3.create_bucket(Bucket=destination)
resp = logs.create_export_task(
logGroupName=log_group_name, fromTime=fromTime, to=to, destination=destination
)
# taskId resembles a valid UUID (i.e. a string of 32 hexadecimal digits)
assert UUID(resp["taskId"])
assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
@mock_logs
def test_create_export_task_raises_ClientError_when_bucket_not_found():
log_group_name = "/aws/codebuild/blah1"
destination = "368a7022dea3dd621"
fromTime = 1611316574
to = 1642852574
logs = boto3.client("logs", region_name="ap-southeast-1")
logs.create_log_group(logGroupName=log_group_name)
with pytest.raises(ClientError):
logs.create_export_task(
logGroupName=log_group_name,
fromTime=fromTime,
to=to,
destination=destination,
)
@mock_s3
@mock_logs
def test_create_export_raises_ResourceNotFoundException_log_group_not_found():
log_group_name = "/aws/codebuild/blah1"
destination = "mybucket"
fromTime = 1611316574
to = 1642852574
s3 = boto3.client("s3")
s3.create_bucket(Bucket=destination)
logs = boto3.client("logs", region_name="ap-southeast-1")
with pytest.raises(logs.exceptions.ResourceNotFoundException):
logs.create_export_task(
logGroupName=log_group_name,
fromTime=fromTime,
to=to,
destination=destination,
)