Logs: Tagging support (#6734)

This commit is contained in:
Bert Blommers 2023-08-27 18:14:51 +00:00 committed by GitHub
parent ffc8b7dd08
commit 956dd265f0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 208 additions and 68 deletions

View File

@ -4368,7 +4368,7 @@
## logs
<details>
<summary>60% implemented</summary>
<summary>66% implemented</summary>
- [ ] associate_kms_key
- [ ] cancel_export_task
@ -4402,7 +4402,7 @@
- [ ] get_log_group_fields
- [ ] get_log_record
- [X] get_query_results
- [ ] list_tags_for_resource
- [X] list_tags_for_resource
- [X] list_tags_log_group
- [ ] put_account_policy
- [ ] put_data_protection_policy
@ -4417,10 +4417,10 @@
- [X] start_query
- [ ] stop_query
- [X] tag_log_group
- [ ] tag_resource
- [X] tag_resource
- [ ] test_metric_filter
- [X] untag_log_group
- [ ] untag_resource
- [X] untag_resource
</details>
## managedblockchain
@ -5792,7 +5792,7 @@
## resourcegroupstaggingapi
<details>
<summary>37% implemented</summary>
<summary>50% implemented</summary>
- [ ] describe_report_creation
- [ ] get_compliance_summary
@ -5800,7 +5800,7 @@
- [X] get_tag_keys
- [X] get_tag_values
- [ ] start_report_creation
- [ ] tag_resources
- [X] tag_resources
- [ ] untag_resources
</details>

View File

@ -38,7 +38,9 @@ logs
- [X] delete_metric_filter
- [ ] delete_query_definition
- [X] delete_resource_policy
Remove resource policy with a policy name matching given name.
Remove resource policy with a policy name matching given name.
- [X] delete_retention_policy
- [X] delete_subscription_filter
@ -55,7 +57,8 @@ logs
- [ ] describe_query_definitions
- [X] describe_resource_policies
Return list of resource policies.
Return list of resource policies.
The next_token and limit arguments are ignored. The maximum
number of resource policies per region is a small number (less
@ -79,7 +82,7 @@ logs
Not all query commands are implemented yet. Please raise an issue if you encounter unexpected results.
- [ ] list_tags_for_resource
- [X] list_tags_for_resource
- [X] list_tags_log_group
- [ ] put_account_policy
- [ ] put_data_protection_policy
@ -93,15 +96,17 @@ logs
- [X] put_metric_filter
- [ ] put_query_definition
- [X] put_resource_policy
Creates/updates resource policy and return policy object
Creates/updates resource policy and return policy object
- [X] put_retention_policy
- [X] put_subscription_filter
- [X] start_query
- [ ] stop_query
- [X] tag_log_group
- [ ] tag_resource
- [X] tag_resource
- [ ] test_metric_filter
- [X] untag_log_group
- [ ] untag_resource
- [X] untag_resource

View File

@ -31,6 +31,10 @@ resourcegroupstaggingapi
- [X] get_tag_keys
- [X] get_tag_values
- [ ] start_report_creation
- [ ] tag_resources
- [X] tag_resources
Only Logs and RDS resources are currently supported
- [ ] untag_resources

View File

@ -761,7 +761,7 @@ class LambdaFunction(CloudFormationModel, DockerModel):
def _invoke_lambda(self, event: Optional[str] = None) -> Tuple[str, bool, str]:
# Create the LogGroup if necessary, to write the result to
self.logs_backend.ensure_log_group(self.logs_group_name, [])
self.logs_backend.ensure_log_group(self.logs_group_name)
# TODO: context not yet implemented
if event is None:
event = dict() # type: ignore[assignment]

View File

@ -858,7 +858,7 @@ class Job(threading.Thread, BaseModel, DockerModel, ManagedState):
# Send to cloudwatch
self.log_stream_name = self._stream_name
self._log_backend.ensure_log_group(self._log_group, None)
self._log_backend.ensure_log_group(self._log_group)
self._log_backend.ensure_log_stream(
self._log_group, self.log_stream_name
)

View File

@ -14,6 +14,7 @@ from moto.logs.logs_query import execute_query
from moto.moto_api._internal import mock_random
from moto.s3.models import s3_backends
from moto.utilities.paginator import paginate
from moto.utilities.tagging_service import TaggingService
from .utils import PAGINATION_MODEL, EventMessageFilter
MAX_RESOURCE_POLICIES_PER_REGION = 10
@ -373,7 +374,6 @@ class LogGroup(CloudFormationModel):
account_id: str,
region: str,
name: str,
tags: Optional[Dict[str, str]],
**kwargs: Any,
):
self.name = name
@ -381,7 +381,6 @@ class LogGroup(CloudFormationModel):
self.region = region
self.arn = f"arn:aws:logs:{region}:{account_id}:log-group:{name}"
self.creation_time = int(unix_time_millis())
self.tags = tags
self.streams: Dict[str, LogStream] = dict() # {name: LogStream}
# AWS defaults to Never Expire for log group retention
self.retention_in_days = kwargs.get("RetentionInDays")
@ -607,21 +606,6 @@ class LogGroup(CloudFormationModel):
def set_retention_policy(self, retention_in_days: Optional[str]) -> None:
self.retention_in_days = retention_in_days
def list_tags(self) -> Dict[str, str]:
    """Return this log group's tag mapping, or an empty dict when none are set."""
    # self.tags may be None (never tagged) — normalise to {} for callers.
    return self.tags or {}
def tag(self, tags: Dict[str, str]) -> None:
    """Merge *tags* into the group's tag map, adopting the mapping wholesale when empty."""
    if not self.tags:
        # No tags yet: store a reference to the supplied dict (matches original behaviour).
        self.tags = tags
    else:
        self.tags.update(tags)
def untag(self, tags_to_remove: List[str]) -> None:
    """Drop the listed tag keys; a group with no tags is left untouched."""
    if not self.tags:
        return
    doomed = set(tags_to_remove)
    self.tags = {key: value for key, value in self.tags.items() if key not in doomed}
def describe_subscription_filters(self) -> Iterable[SubscriptionFilter]:
return self.subscription_filters.values()
@ -741,6 +725,7 @@ class LogsBackend(BaseBackend):
self.queries: Dict[str, LogQuery] = dict()
self.resource_policies: Dict[str, LogResourcePolicy] = dict()
self.destinations: Dict[str, Destination] = dict()
self.tagger = TaggingService()
@staticmethod
def default_vpc_endpoint_service(
@ -763,17 +748,18 @@ class LogsBackend(BaseBackend):
value=log_group_name,
)
self.groups[log_group_name] = LogGroup(
self.account_id, self.region_name, log_group_name, tags, **kwargs
self.account_id, self.region_name, log_group_name, **kwargs
)
self.tag_resource(self.groups[log_group_name].arn, tags)
return self.groups[log_group_name]
def ensure_log_group(
self, log_group_name: str, tags: Optional[Dict[str, str]]
) -> None:
def ensure_log_group(self, log_group_name: str) -> None:
if log_group_name in self.groups:
return
self.groups[log_group_name] = LogGroup(
self.account_id, self.region_name, log_group_name, tags
self.account_id,
self.region_name,
log_group_name,
)
def delete_log_group(self, log_group_name: str) -> None:
@ -801,7 +787,11 @@ class LogsBackend(BaseBackend):
raise ResourceNotFoundException()
def put_destination(
self, destination_name: str, role_arn: str, target_arn: str
self,
destination_name: str,
role_arn: str,
target_arn: str,
tags: Dict[str, str],
) -> Destination:
for _, destination in self.destinations.items():
if destination.destination_name == destination_name:
@ -814,6 +804,7 @@ class LogsBackend(BaseBackend):
self.account_id, self.region_name, destination_name, role_arn, target_arn
)
self.destinations[destination.arn] = destination
self.tag_resource(destination.arn, tags)
return destination
def delete_destination(self, destination_name: str) -> None:
@ -1010,7 +1001,8 @@ class LogsBackend(BaseBackend):
self.groups[log_group_name].set_retention_policy(None)
def describe_resource_policies(self) -> List[LogResourcePolicy]:
"""Return list of resource policies.
"""
Return list of resource policies.
The next_token and limit arguments are ignored. The maximum
number of resource policies per region is a small number (less
@ -1022,7 +1014,9 @@ class LogsBackend(BaseBackend):
def put_resource_policy(
self, policy_name: str, policy_doc: str
) -> LogResourcePolicy:
"""Creates/updates resource policy and return policy object"""
"""
Creates/updates resource policy and return policy object
"""
if policy_name in self.resource_policies:
policy = self.resource_policies[policy_name]
policy.update(policy_doc)
@ -1034,7 +1028,9 @@ class LogsBackend(BaseBackend):
return policy
def delete_resource_policy(self, policy_name: str) -> None:
"""Remove resource policy with a policy name matching given name."""
"""
Remove resource policy with a policy name matching given name.
"""
if policy_name not in self.resource_policies:
raise ResourceNotFoundException(
msg=f"Policy with name [{policy_name}] does not exist"
@ -1045,19 +1041,19 @@ class LogsBackend(BaseBackend):
if log_group_name not in self.groups:
raise ResourceNotFoundException()
log_group = self.groups[log_group_name]
return log_group.list_tags()
return self.list_tags_for_resource(log_group.arn)
def tag_log_group(self, log_group_name: str, tags: Dict[str, str]) -> None:
if log_group_name not in self.groups:
raise ResourceNotFoundException()
log_group = self.groups[log_group_name]
log_group.tag(tags)
self.tag_resource(log_group.arn, tags)
def untag_log_group(self, log_group_name: str, tags: List[str]) -> None:
if log_group_name not in self.groups:
raise ResourceNotFoundException()
log_group = self.groups[log_group_name]
log_group.untag(tags)
self.untag_resource(log_group.arn, tags)
def put_metric_filter(
self,
@ -1213,5 +1209,14 @@ class LogsBackend(BaseBackend):
raise ResourceNotFoundException()
return str(mock_random.uuid4())
def list_tags_for_resource(self, resource_arn: str) -> Dict[str, str]:
    """Return the tags stored for *resource_arn* as a plain key/value dict."""
    tag_dict = self.tagger.get_tag_dict_for_resource(resource_arn)
    return tag_dict
def tag_resource(self, arn: str, tags: Dict[str, str]) -> None:
    """Attach *tags* to *arn*, converting the dict into the TaggingService input shape."""
    tag_list = TaggingService.convert_dict_to_tags_input(tags)
    self.tagger.tag_resource(arn, tag_list)
def untag_resource(self, arn: str, tag_keys: List[str]) -> None:
    """Remove the given tag keys from *arn*."""
    tagger = self.tagger
    tagger.untag_resource_using_names(arn, tag_keys)
logs_backends = BackendDict(LogsBackend, "logs")

View File

@ -187,9 +187,13 @@ class LogsResponse(BaseResponse):
destination_name = self._get_param("destinationName")
role_arn = self._get_param("roleArn")
target_arn = self._get_param("targetArn")
tags = self._get_param("tags")
destination = self.logs_backend.put_destination(
destination_name, role_arn, target_arn
destination_name,
role_arn,
target_arn,
tags,
)
result = {"destination": destination.to_dict()}
return json.dumps(result)
@ -435,3 +439,20 @@ class LogsResponse(BaseResponse):
log_group_name=log_group_name, destination=destination
)
return json.dumps(dict(taskId=str(task_id)))
def list_tags_for_resource(self) -> str:
    """Handler for ListTagsForResource: serialize the tags attached to the requested ARN."""
    arn = self._get_param("resourceArn")
    body = {"tags": self.logs_backend.list_tags_for_resource(arn)}
    return json.dumps(body)
def tag_resource(self) -> str:
    """Handler for TagResource: apply the supplied tag dict to the requested ARN."""
    arn = self._get_param("resourceArn")
    new_tags = self._get_param("tags")
    self.logs_backend.tag_resource(arn, new_tags)
    # TagResource has no response payload.
    return "{}"
def untag_resource(self) -> str:
    """Handler for UntagResource: strip the supplied tag keys from the requested ARN."""
    arn = self._get_param("resourceArn")
    keys = self._get_param("tagKeys")
    self.logs_backend.untag_resource(arn, keys)
    # UntagResource has no response payload.
    return "{}"

View File

@ -10,6 +10,7 @@ from moto.elb.models import elb_backends, ELBBackend
from moto.elbv2.models import elbv2_backends, ELBv2Backend
from moto.glue.models import glue_backends, GlueBackend
from moto.kinesis.models import kinesis_backends, KinesisBackend
from moto.logs.models import logs_backends, LogsBackend
from moto.kms.models import kms_backends, KmsBackend
from moto.rds.models import rds_backends, RDSBackend
from moto.glacier.models import glacier_backends, GlacierBackend
@ -31,7 +32,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
# Like 'someuuid': {'gen': <generator>, 'misc': None}
# Misc is there for peeking from a generator and it cant
# fit in the current request. As we only store generators
# theres not really any point to clean up
# there is really no point cleaning up
@property
def s3_backend(self) -> S3Backend:
@ -61,6 +62,10 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
def kms_backend(self) -> KmsBackend:
return kms_backends[self.account_id][self.region_name]
@property
def logs_backend(self) -> LogsBackend:
return logs_backends[self.account_id][self.region_name]
@property
def rds_backend(self) -> RDSBackend:
return rds_backends[self.account_id][self.region_name]
@ -101,7 +106,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
# Check key matches
filters.append(lambda t, v, key=tag_filter_dict["Key"]: t == key)
elif len(values) == 1:
# Check its exactly the same as key, value
# Check it's exactly the same as key, value
filters.append(
lambda t, v, key=tag_filter_dict["Key"], value=values[0]: t == key # type: ignore
and v == value
@ -371,6 +376,21 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
yield {"ResourceARN": f"{kms_key.arn}", "Tags": tags}
# LOGS
if (
not resource_type_filters
or "logs" in resource_type_filters
or "logs:loggroup" in resource_type_filters
):
for group in self.logs_backend.groups.values():
log_tags = self.logs_backend.list_tags_for_resource(group.arn)
tags = format_tags(log_tags)
if not log_tags or not tag_filter(tags):
# Skip if no tags, or invalid filter
continue
yield {"ResourceARN": group.arn, "Tags": tags}
# RDS resources
resource_map: Dict[str, Dict[str, Any]] = {
"rds:cluster": self.rds_backend.clusters,
@ -733,7 +753,7 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
self, resource_arns: List[str], tags: Dict[str, str]
) -> Dict[str, Dict[str, Any]]:
"""
Only RDS resources are currently supported
Only Logs and RDS resources are currently supported
"""
missing_resources = []
missing_error: Dict[str, Any] = {
@ -746,6 +766,8 @@ class ResourceGroupsTaggingAPIBackend(BaseBackend):
self.rds_backend.add_tags_to_resource(
arn, TaggingService.convert_dict_to_tags_input(tags)
)
if arn.startswith("arn:aws:logs:"):
self.logs_backend.tag_resource(arn, tags)
else:
missing_resources.append(arn)
return {arn: missing_error for arn in missing_resources}

View File

@ -367,6 +367,13 @@ lambda:
- TestAccLambdaFunctionURL_Alias
- TestAccLambdaFunctionURL_basic
- TestAccLambdaFunctionURL_TwoURLs
logs:
- TestAccLogsDestination_
- TestAccLogsGroupDataSource_basic
- TestAccLogsGroupsDataSource_basic
- TestAccLogsGroup_basic
- TestAccLogsGroup_tags
- TestAccLogsStream
meta:
- TestAccMetaBillingServiceAccountDataSource
mq:

View File

@ -440,7 +440,7 @@ def test_create_log_group(kms_key_id):
create_logs_params["kmsKeyId"] = kms_key_id
# When
response = conn.create_log_group(**create_logs_params)
conn.create_log_group(**create_logs_params)
response = conn.describe_log_groups()
# Then
@ -992,16 +992,16 @@ def test_list_tags_log_group():
log_group_name = "dummy"
tags = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.create_log_group(logGroupName=log_group_name)
conn.create_log_group(logGroupName=log_group_name)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == {}
response = conn.delete_log_group(logGroupName=log_group_name)
response = conn.create_log_group(logGroupName=log_group_name, tags=tags)
conn.delete_log_group(logGroupName=log_group_name)
conn.create_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
response = conn.delete_log_group(logGroupName=log_group_name)
conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
@ -1009,47 +1009,43 @@ def test_tag_log_group():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
tags = {"tag_key_1": "tag_value_1"}
response = conn.create_log_group(logGroupName=log_group_name)
conn.create_log_group(logGroupName=log_group_name)
response = conn.tag_log_group(logGroupName=log_group_name, tags=tags)
conn.tag_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
tags_with_added_value = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(
logGroupName=log_group_name, tags={"tag_key_2": "tag_value_2"}
)
conn.tag_log_group(logGroupName=log_group_name, tags={"tag_key_2": "tag_value_2"})
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags_with_added_value
tags_with_updated_value = {"tag_key_1": "tag_value_XX", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(
logGroupName=log_group_name, tags={"tag_key_1": "tag_value_XX"}
)
conn.tag_log_group(logGroupName=log_group_name, tags={"tag_key_1": "tag_value_XX"})
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags_with_updated_value
response = conn.delete_log_group(logGroupName=log_group_name)
conn.delete_log_group(logGroupName=log_group_name)
@mock_logs
def test_untag_log_group():
conn = boto3.client("logs", TEST_REGION)
log_group_name = "dummy"
response = conn.create_log_group(logGroupName=log_group_name)
conn.create_log_group(logGroupName=log_group_name)
tags = {"tag_key_1": "tag_value_1", "tag_key_2": "tag_value_2"}
response = conn.tag_log_group(logGroupName=log_group_name, tags=tags)
conn.tag_log_group(logGroupName=log_group_name, tags=tags)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == tags
tags_to_remove = ["tag_key_1"]
remaining_tags = {"tag_key_2": "tag_value_2"}
response = conn.untag_log_group(logGroupName=log_group_name, tags=tags_to_remove)
conn.untag_log_group(logGroupName=log_group_name, tags=tags_to_remove)
response = conn.list_tags_log_group(logGroupName=log_group_name)
assert response["tags"] == remaining_tags
response = conn.delete_log_group(logGroupName=log_group_name)
conn.delete_log_group(logGroupName=log_group_name)
@mock_logs

View File

@ -0,0 +1,40 @@
import boto3
from moto import mock_logs
@mock_logs
def test_destination_tags():
    """A destination created with tags supports the generic tag/untag/list APIs."""
    client = boto3.client("logs", "us-west-2")
    created = client.put_destination(
        destinationName="test-destination",
        targetArn="arn:aws:kinesis:us-east-1:123456789012:stream/my-kinesis-stream",
        roleArn="arn:aws:iam::123456789012:role/my-subscription-role",
        tags={"key1": "val1"},
    )
    _verify_tag_operations(created["destination"]["arn"], client)
@mock_logs
def test_log_groups_tags():
    """A log group created with tags supports the generic tag/untag/list APIs."""
    client = boto3.client("logs", "us-west-2")
    client.create_log_group(logGroupName="test", tags={"key1": "val1"})
    group_arn = client.describe_log_groups()["logGroups"][0]["arn"]
    _verify_tag_operations(group_arn, client)
def _verify_tag_operations(arn, logs):
logs.tag_resource(resourceArn=arn, tags={"key2": "val2"})
tags = logs.list_tags_for_resource(resourceArn=arn)["tags"]
assert tags == {"key1": "val1", "key2": "val2"}
logs.untag_resource(resourceArn=arn, tagKeys=["key2"])
tags = logs.list_tags_for_resource(resourceArn=arn)["tags"]
assert tags == {"key1": "val1"}

View File

@ -6,14 +6,13 @@ def test_log_group_to_describe_dict():
# Given
region = "us-east-1"
name = "test-log-group"
tags = {"TestTag": "TestValue"}
kms_key_id = (
"arn:aws:kms:us-east-1:000000000000:key/51d81fab-b138-4bd2-8a09-07fd6d37224d"
)
kwargs = dict(kmsKeyId=kms_key_id)
# When
log_group = LogGroup(DEFAULT_ACCOUNT_ID, region, name, tags, **kwargs)
log_group = LogGroup(DEFAULT_ACCOUNT_ID, region, name, **kwargs)
describe_dict = log_group.to_describe_dict()
# Then

View File

@ -0,0 +1,41 @@
import boto3
import unittest
from moto import mock_logs
from moto import mock_resourcegroupstaggingapi
@mock_logs
@mock_resourcegroupstaggingapi
class TestLogsTagging(unittest.TestCase):
    """Verify CloudWatch Logs tagging via the resourcegroupstaggingapi backend."""

    def setUp(self) -> None:
        self.logs = boto3.client("logs", region_name="us-east-2")
        self.rtapi = boto3.client("resourcegroupstaggingapi", region_name="us-east-2")
        # NOTE(review): these two lists are never used by any test in this class —
        # presumably leftover scaffolding; confirm before removing.
        self.resources_tagged = []
        self.resources_untagged = []
        # Three log groups, each pre-tagged with key1=val1.
        for i in range(3):
            self.logs.create_log_group(logGroupName=f"test{i}", tags={"key1": "val1"})
        self.arns = [lg["arn"] for lg in self.logs.describe_log_groups()["logGroups"]]

    def test_get_resources_logs(self):
        # Both the service-level and the resource-type-level filter should match
        # all three tagged log groups.
        resp = self.rtapi.get_resources(ResourceTypeFilters=["logs"])
        assert len(resp["ResourceTagMappingList"]) == 3

        resp = self.rtapi.get_resources(ResourceTypeFilters=["logs:loggroup"])
        assert len(resp["ResourceTagMappingList"]) == 3

    def test_tag_resources_logs(self):
        # WHEN
        # we tag resources via the Resource Groups Tagging API
        self.rtapi.tag_resources(
            ResourceARNList=self.arns,
            Tags={"key2": "val2"},
        )

        # THEN
        # we can retrieve the tags using the Logs API
        def get_tags(arn):
            return self.logs.list_tags_for_resource(resourceArn=arn)["tags"]

        for arn in self.arns:
            assert get_tags(arn) == {"key1": "val1", "key2": "val2"}