diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md
index 7f34b7d53..71e1406cb 100644
--- a/IMPLEMENTATION_COVERAGE.md
+++ b/IMPLEMENTATION_COVERAGE.md
@@ -2003,7 +2003,7 @@ ## glue
-6% implemented
+7% implemented
 - [ ] batch_create_partition
 - [ ] batch_delete_connection
@@ -2138,7 +2138,7 @@
 - [ ] resume_workflow_run
 - [ ] search_tables
 - [ ] start_blueprint_run
-- [ ] start_crawler
+- [X] start_crawler
 - [ ] start_crawler_schedule
 - [ ] start_export_labels_task_run
 - [ ] start_import_labels_task_run
@@ -2147,7 +2147,7 @@
 - [ ] start_ml_labeling_set_generation_task_run
 - [ ] start_trigger
 - [ ] start_workflow_run
-- [ ] stop_crawler
+- [X] stop_crawler
 - [ ] stop_crawler_schedule
 - [ ] stop_trigger
 - [ ] stop_workflow_run
@@ -2716,24 +2716,29 @@ ## lambda
-48% implemented
+41% implemented
 - [ ] add_layer_version_permission
 - [X] add_permission
 - [ ] create_alias
+- [ ] create_code_signing_config
 - [X] create_event_source_mapping
 - [X] create_function
 - [ ] delete_alias
+- [ ] delete_code_signing_config
 - [X] delete_event_source_mapping
 - [X] delete_function
+- [ ] delete_function_code_signing_config
 - [X] delete_function_concurrency
 - [ ] delete_function_event_invoke_config
 - [ ] delete_layer_version
 - [ ] delete_provisioned_concurrency_config
 - [ ] get_account_settings
 - [ ] get_alias
+- [ ] get_code_signing_config
 - [X] get_event_source_mapping
 - [X] get_function
+- [ ] get_function_code_signing_config
 - [X] get_function_concurrency
 - [ ] get_function_configuration
 - [ ] get_function_event_invoke_config
@@ -2745,9 +2750,11 @@
 - [X] invoke
 - [ ] invoke_async
 - [ ] list_aliases
+- [ ] list_code_signing_configs
 - [X] list_event_source_mappings
 - [ ] list_function_event_invoke_configs
 - [X] list_functions
+- [ ] list_functions_by_code_signing_config
 - [ ] list_layer_versions
 - [X] list_layers
 - [ ] list_provisioned_concurrency_configs
@@ -2755,6 +2762,7 @@
 - [X] list_versions_by_function
 - [X] publish_layer_version
 - [ ] publish_version
+- [ ] put_function_code_signing_config
 - [X] put_function_concurrency
 - [ ] put_function_event_invoke_config
 - [ ] put_provisioned_concurrency_config
@@ -2763,6 +2771,7 @@
 - [X] tag_resource
 - [X] untag_resource
 - [ ] update_alias
+- [ ] update_code_signing_config
 - [X] update_event_source_mapping
 - [X] update_function_code
 - [X] update_function_configuration
@@ -3208,6 +3217,7 @@
 - [ ] copy_db_snapshot
 - [ ] copy_option_group
 - [ ] create_custom_availability_zone
+- [ ] create_custom_db_engine_version
 - [X] create_db_cluster
 - [ ] create_db_cluster_endpoint
 - [ ] create_db_cluster_parameter_group
@@ -3224,6 +3234,7 @@
 - [ ] create_global_cluster
 - [X] create_option_group
 - [ ] delete_custom_availability_zone
+- [ ] delete_custom_db_engine_version
 - [X] delete_db_cluster
 - [ ] delete_db_cluster_endpoint
 - [ ] delete_db_cluster_parameter_group
@@ -3288,6 +3299,7 @@
 - [X] list_tags_for_resource
 - [ ] modify_certificates
 - [ ] modify_current_db_cluster_capacity
+- [ ] modify_custom_db_engine_version
 - [ ] modify_db_cluster
 - [ ] modify_db_cluster_endpoint
 - [ ] modify_db_cluster_parameter_group
diff --git a/moto/glue/exceptions.py b/moto/glue/exceptions.py
index a77b9b88e..a4a3d0c49 100644
--- a/moto/glue/exceptions.py
+++ b/moto/glue/exceptions.py
@@ -62,3 +62,15 @@ class CrawlerNotFoundException(EntityNotFoundException):
 class VersionNotFoundException(EntityNotFoundException):
     def __init__(self):
         super(VersionNotFoundException, self).__init__("Version not found.")
+
+
+class CrawlerRunningException(GlueClientError):
+    def __init__(self, msg):
+        super(CrawlerRunningException, self).__init__("CrawlerRunningException", msg)
+
+
+class CrawlerNotRunningException(GlueClientError):
+    def __init__(self, msg):
+        super(CrawlerNotRunningException, self).__init__(
+            "CrawlerNotRunningException", msg
+        )
diff --git a/moto/glue/models.py b/moto/glue/models.py
index 296d5029d..426c69035 100644
--- a/moto/glue/models.py
+++ b/moto/glue/models.py
@@ -1,8 +1,10 @@
 import time
+from collections import OrderedDict
 from datetime import datetime
 
 from moto.core import BaseBackend, BaseModel
-from collections import OrderedDict
+from moto.glue.exceptions import CrawlerRunningException, CrawlerNotRunningException
+
 from .exceptions import (
     JsonRESTError,
     CrawlerAlreadyExistsException,
@@ -122,6 +124,14 @@ class GlueBackend(BaseBackend):
     def get_crawlers(self):
         return [self.crawlers[key] for key in self.crawlers] if self.crawlers else []
 
+    def start_crawler(self, name):
+        crawler = self.get_crawler(name)
+        crawler.start_crawler()
+
+    def stop_crawler(self, name):
+        crawler = self.get_crawler(name)
+        crawler.stop_crawler()
+
     def delete_crawler(self, name):
         try:
             del self.crawlers[name]
@@ -311,6 +321,20 @@ class FakeCrawler(BaseModel):
 
         return data
 
+    def start_crawler(self):
+        if self.state == "RUNNING":
+            raise CrawlerRunningException(
+                f"Crawler with name {self.name} has already started"
+            )
+        self.state = "RUNNING"
+
+    def stop_crawler(self):
+        if self.state != "RUNNING":
+            raise CrawlerNotRunningException(
+                f"Crawler with name {self.name} isn't running"
+            )
+        self.state = "STOPPING"
+
 
 class LastCrawlInfo(BaseModel):
     def __init__(
diff --git a/moto/glue/responses.py b/moto/glue/responses.py
index d110eacc1..9fb07c0bb 100644
--- a/moto/glue/responses.py
+++ b/moto/glue/responses.py
@@ -303,6 +303,16 @@ class GlueResponse(BaseResponse):
         crawlers = self.glue_backend.get_crawlers()
         return json.dumps({"Crawlers": [crawler.as_dict() for crawler in crawlers]})
 
+    def start_crawler(self):
+        name = self.parameters.get("Name")
+        self.glue_backend.start_crawler(name)
+        return ""
+
+    def stop_crawler(self):
+        name = self.parameters.get("Name")
+        self.glue_backend.stop_crawler(name)
+        return ""
+
     def delete_crawler(self):
         name = self.parameters.get("Name")
         self.glue_backend.delete_crawler(name)
diff --git a/tests/test_glue/test_datacatalog.py b/tests/test_glue/test_datacatalog.py
index 86737a578..64628e1a5 100644
--- a/tests/test_glue/test_datacatalog.py
+++ b/tests/test_glue/test_datacatalog.py
@@ -1117,6 +1117,60 @@ def test_get_crawlers_several_items():
     crawlers[1].get("Name").should.equal(name_2)
 
 
+@mock_glue
+def test_start_crawler():
+    client = boto3.client("glue", region_name="us-east-1")
+    name = "my_crawler_name"
+    helpers.create_crawler(client, name)
+
+    client.start_crawler(Name=name)
+
+    response = client.get_crawler(Name=name)
+    crawler = response["Crawler"]
+
+    crawler.get("State").should.equal("RUNNING")
+
+
+@mock_glue
+def test_start_crawler_should_raise_exception_if_already_running():
+    client = boto3.client("glue", region_name="us-east-1")
+    name = "my_crawler_name"
+    helpers.create_crawler(client, name)
+
+    client.start_crawler(Name=name)
+    with pytest.raises(ClientError) as exc:
+        client.start_crawler(Name=name)
+
+    exc.value.response["Error"]["Code"].should.equal("CrawlerRunningException")
+
+
+@mock_glue
+def test_stop_crawler():
+    client = boto3.client("glue", region_name="us-east-1")
+    name = "my_crawler_name"
+    helpers.create_crawler(client, name)
+    client.start_crawler(Name=name)
+
+    client.stop_crawler(Name=name)
+
+    response = client.get_crawler(Name=name)
+    crawler = response["Crawler"]
+
+    crawler.get("State").should.equal("STOPPING")
+
+
+@mock_glue
+def test_stop_crawler_should_raise_exception_if_not_running():
+    client = boto3.client("glue", region_name="us-east-1")
+    name = "my_crawler_name"
+    helpers.create_crawler(client, name)
+
+    with pytest.raises(ClientError) as exc:
+        client.stop_crawler(Name=name)
+
+    exc.value.response["Error"]["Code"].should.equal("CrawlerNotRunningException")
+
+
 @mock_glue
 def test_delete_crawler():
     client = boto3.client("glue", region_name="us-east-1")