diff --git a/moto/glue/models.py b/moto/glue/models.py
index 08c0542a8..a81b01866 100644
--- a/moto/glue/models.py
+++ b/moto/glue/models.py
@@ -3,6 +3,7 @@ from collections import OrderedDict
 from datetime import datetime
 
 from moto.core import BaseBackend, BaseModel
+from moto.core.models import get_account_id
 from moto.glue.exceptions import CrawlerRunningException, CrawlerNotRunningException
 from .exceptions import (
     JsonRESTError,
@@ -20,6 +21,7 @@ from .exceptions import (
 )
 from .utils import PartitionFilter
 from ..utilities.paginator import paginate
+from ..utilities.tagging_service import TaggingService
 
 
 class GlueBackend(BaseBackend):
@@ -43,6 +45,7 @@ class GlueBackend(BaseBackend):
         self.crawlers = OrderedDict()
         self.jobs = OrderedDict()
         self.job_runs = OrderedDict()
+        self.tagger = TaggingService()
 
     @staticmethod
     def default_vpc_endpoint_service(service_region, zones):
@@ -137,6 +140,7 @@ class GlueBackend(BaseBackend):
             configuration=configuration,
             crawler_security_configuration=crawler_security_configuration,
             tags=tags,
+            backend=self,
         )
         self.crawlers[name] = crawler
 
@@ -209,6 +213,7 @@ class GlueBackend(BaseBackend):
             glue_version,
             number_of_workers,
             worker_type,
+            backend=self,
         )
         return name
 
@@ -230,6 +235,16 @@ class GlueBackend(BaseBackend):
     def list_jobs(self):
         return [job for _, job in self.jobs.items()]
 
+    def get_tags(self, resource_id):
+        return self.tagger.get_tag_dict_for_resource(resource_id)
+
+    def tag_resource(self, resource_arn, tags):
+        tags = TaggingService.convert_dict_to_tags_input(tags or {})
+        self.tagger.tag_resource(resource_arn, tags)
+
+    def untag_resource(self, resource_arn, tag_keys):
+        self.tagger.untag_resource_using_names(resource_arn, tag_keys)
+
 
 class FakeDatabase(BaseModel):
     def __init__(self, database_name, database_input):
@@ -368,6 +383,7 @@ class FakeCrawler(BaseModel):
         configuration,
         crawler_security_configuration,
         tags,
+        backend,
     ):
         self.name = name
         self.role = role
@@ -382,13 +398,15 @@ class FakeCrawler(BaseModel):
         self.lineage_configuration = lineage_configuration
         self.configuration = configuration
         self.crawler_security_configuration = crawler_security_configuration
-        self.tags = tags
         self.state = "READY"
         self.creation_time = datetime.utcnow()
         self.last_updated = self.creation_time
         self.version = 1
         self.crawl_elapsed_time = 0
         self.last_crawl_info = None
+        self.arn = f"arn:aws:glue:us-east-1:{get_account_id()}:crawler/{self.name}"
+        self.backend = backend
+        self.backend.tag_resource(self.arn, tags)
 
     def get_name(self):
         return self.name
@@ -486,6 +504,7 @@ class FakeJob:
         glue_version=None,
         number_of_workers=None,
         worker_type=None,
+        backend=None,
     ):
         self.name = name
         self.description = description
@@ -502,13 +521,15 @@ class FakeJob:
         self.state = "READY"
         self.max_capacity = max_capacity
         self.security_configuration = security_configuration
-        self.tags = tags
         self.notification_property = notification_property
         self.glue_version = glue_version
         self.number_of_workers = number_of_workers
         self.worker_type = worker_type
         self.created_on = datetime.utcnow()
         self.last_modified_on = datetime.utcnow()
+        self.arn = f"arn:aws:glue:us-east-1:{get_account_id()}:job/{self.name}"
+        self.backend = backend
+        self.backend.tag_resource(self.arn, tags)
 
     def get_name(self):
         return self.name
diff --git a/moto/glue/responses.py b/moto/glue/responses.py
index 3160c84ba..4c6b86fd4 100644
--- a/moto/glue/responses.py
+++ b/moto/glue/responses.py
@@ -328,7 +328,7 @@ class GlueResponse(BaseResponse):
         return [
             crawler.get_name()
             for crawler in crawlers
-            if self.is_tags_match(crawler.tags, tags)
+            if self.is_tags_match(self, crawler.arn, tags)
         ]
 
     def start_crawler(self):
@@ -420,13 +420,33 @@ class GlueResponse(BaseResponse):
             )
         )
 
+    def get_tags(self):
+        resource_arn = self.parameters.get("ResourceArn")
+        tags = self.glue_backend.get_tags(resource_arn)
+        return 200, {}, json.dumps({"Tags": tags})
+
+    def tag_resource(self):
+        resource_arn = self.parameters.get("ResourceArn")
+        tags = self.parameters.get("TagsToAdd", {})
+        self.glue_backend.tag_resource(resource_arn, tags)
+        return 201, {}, "{}"
+
+    def untag_resource(self):
+        resource_arn = self._get_param("ResourceArn")
+        tag_keys = self.parameters.get("TagsToRemove")
+        self.glue_backend.untag_resource(resource_arn, tag_keys)
+        return 200, {}, "{}"
+
     def filter_jobs_by_tags(self, jobs, tags):
         if not tags:
             return [job.get_name() for job in jobs]
-        return [job.get_name() for job in jobs if self.is_tags_match(job.tags, tags)]
+        return [
+            job.get_name() for job in jobs if self.is_tags_match(self, job.arn, tags)
+        ]
 
     @staticmethod
-    def is_tags_match(glue_resource_tags, tags):
+    def is_tags_match(self, resource_arn, tags):
+        glue_resource_tags = self.glue_backend.get_tags(resource_arn)
         mutual_keys = set(glue_resource_tags).intersection(tags)
         for key in mutual_keys:
             if glue_resource_tags[key] == tags[key]:
diff --git a/tests/test_glue/test_glue.py b/tests/test_glue/test_glue.py
index fa2395bd2..7adf95d4f 100644
--- a/tests/test_glue/test_glue.py
+++ b/tests/test_glue/test_glue.py
@@ -192,6 +192,30 @@ def test_list_jobs_with_tags():
     response["JobNames"].should.have.length_of(1)
 
 
+@mock_glue
+def test_list_jobs_after_tagging():
+    client = create_glue_client()
+    job_name = create_test_job(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    client.tag_resource(ResourceArn=resource_arn, TagsToAdd={"key1": "value1"})
+
+    response = client.list_jobs(Tags={"key1": "value1"})
+    response["JobNames"].should.have.length_of(1)
+
+
+@mock_glue
+def test_list_jobs_after_removing_tag():
+    client = create_glue_client()
+    job_name = create_test_job(client, {"key1": "value1"})
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    client.untag_resource(ResourceArn=resource_arn, TagsToRemove=["key1"])
+
+    response = client.list_jobs(Tags={"key1": "value1"})
+    response["JobNames"].should.have.length_of(0)
+
+
 @mock_glue
 def test_list_jobs_next_token_logic_does_not_create_infinite_loop():
     client = create_glue_client()
@@ -281,6 +305,30 @@ def test_list_crawlers_with_tags():
     response["CrawlerNames"].should.have.length_of(1)
 
 
+@mock_glue
+def test_list_crawlers_after_tagging():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    client.tag_resource(ResourceArn=resource_arn, TagsToAdd={"key1": "value1"})
+
+    response = client.list_crawlers(Tags={"key1": "value1"})
+    response["CrawlerNames"].should.have.length_of(1)
+
+
+@mock_glue
+def test_list_crawlers_after_removing_tag():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client, {"key1": "value1"})
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    client.untag_resource(ResourceArn=resource_arn, TagsToRemove=["key1"])
+
+    response = client.list_crawlers(Tags={"key1": "value1"})
+    response["CrawlerNames"].should.have.length_of(0)
+
+
 @mock_glue
 def test_list_crawlers_next_token_logic_does_not_create_infinite_loop():
     client = create_glue_client()
@@ -291,3 +339,113 @@ def test_list_crawlers_next_token_logic_does_not_create_infinite_loop():
         response = client.list_crawlers(NextToken=next_token)
         next_token = response.get("NextToken")
     assert not next_token
+
+
+@mock_glue
+def test_get_tags_job():
+    client = create_glue_client()
+    job_name = create_test_job(client, {"key1": "value1", "key2": "value2"})
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key2": "value2"})
+
+
+@mock_glue
+def test_get_tags_jobs_no_tags():
+    client = create_glue_client()
+    job_name = create_test_job(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({})
+
+
+@mock_glue
+def test_tag_glue_job():
+    client = create_glue_client()
+    job_name = create_test_job(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    client.tag_resource(
+        ResourceArn=resource_arn, TagsToAdd={"key1": "value1", "key2": "value2"}
+    )
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key2": "value2"})
+
+
+@mock_glue
+def test_untag_glue_job():
+    client = create_glue_client()
+    job_name = create_test_job(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:job/{job_name}"
+
+    client.tag_resource(
+        ResourceArn=resource_arn,
+        TagsToAdd={"key1": "value1", "key2": "value2", "key3": "value3"},
+    )
+
+    client.untag_resource(ResourceArn=resource_arn, TagsToRemove=["key2"])
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key3": "value3"})
+
+
+@mock_glue
+def test_get_tags_crawler():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client, {"key1": "value1", "key2": "value2"})
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key2": "value2"})
+
+
+@mock_glue
+def test_get_tags_crawler_no_tags():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({})
+
+
+@mock_glue
+def test_tag_glue_crawler():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    client.tag_resource(
+        ResourceArn=resource_arn, TagsToAdd={"key1": "value1", "key2": "value2"}
+    )
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key2": "value2"})
+
+
+@mock_glue
+def test_untag_glue_crawler():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client)
+    resource_arn = f"arn:aws:glue:us-east-1:123456789012:crawler/{crawler_name}"
+
+    client.tag_resource(
+        ResourceArn=resource_arn,
+        TagsToAdd={"key1": "value1", "key2": "value2", "key3": "value3"},
+    )
+
+    client.untag_resource(ResourceArn=resource_arn, TagsToRemove=["key2"])
+
+    resp = client.get_tags(ResourceArn=resource_arn)
+
+    resp.should.have.key("Tags").equals({"key1": "value1", "key3": "value3"})
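For reference, a minimal end-to-end sketch of the TagResource / GetTags / UntagResource support this change adds, driven through boto3 under the @mock_glue decorator. The crawler name, role ARN, and S3 target below are illustrative assumptions; the region and account id mirror the defaults hard-coded in the models and tests above.

import boto3
from moto import mock_glue


@mock_glue
def demo_glue_tagging():
    # Create a crawler inside the mocked Glue backend, then exercise the
    # new tagging endpoints against its ARN.
    client = boto3.client("glue", region_name="us-east-1")
    client.create_crawler(
        Name="demo-crawler",
        Role="arn:aws:iam::123456789012:role/demo-crawler-role",  # illustrative role
        Targets={"S3Targets": [{"Path": "s3://demo-bucket"}]},  # illustrative target
    )
    arn = "arn:aws:glue:us-east-1:123456789012:crawler/demo-crawler"

    # Add a tag, read it back, then remove it again.
    client.tag_resource(ResourceArn=arn, TagsToAdd={"team": "data"})
    assert client.get_tags(ResourceArn=arn)["Tags"] == {"team": "data"}

    client.untag_resource(ResourceArn=arn, TagsToRemove=["team"])
    assert client.get_tags(ResourceArn=arn)["Tags"] == {}


demo_glue_tagging()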