Prepare release 4.1.4 (#6013)
This commit is contained in:
parent
101cee8360
commit
de388cfd9e
34
CHANGELOG.md
34
CHANGELOG.md
@ -2,6 +2,40 @@ Moto Changelog
|
||||
==============
|
||||
|
||||
|
||||
4.1.4
|
||||
-----
|
||||
Docker Digest for 4.1.4: <autopopulateddigest>
|
||||
|
||||
New Services:
|
||||
* Neptune:
|
||||
* create_db_cluster()
|
||||
* create_global_cluster()
|
||||
* delete_db_cluster()
|
||||
* delete_global_cluster()
|
||||
* describe_db_clusters()
|
||||
* describe_global_clusters()
|
||||
* describe_orderable_db_instance_options()
|
||||
* modify_db_cluster()
|
||||
* start_db_cluster()
|
||||
|
||||
New Methods:
|
||||
* Glue:
|
||||
* get_jobs()
|
||||
* get_table_version()
|
||||
|
||||
Miscellaneous:
|
||||
* APIGateway: update_rest_api() now updates the policy-attribute
|
||||
* Athena now exposes an endpoint to store mock query results. See http://docs.getmoto.org/en/latest/docs/services/athena.html
|
||||
* CognitoIDP: The idToken now contains the cognito:groups attribute
|
||||
* DynamoDB: scan() now respects the Projection-attribute of a GSI/LSI
|
||||
* KMS: encrypt(), decrypt(), re_encrypt() now accept aliases as arguments
|
||||
* KMS: list_aliases() now supports the KeyId-parameter
|
||||
* Lambda: FIFO Queues are now supported event sources
|
||||
* RDS: create_option_group() now returns the OptionGroupArn-attribute
|
||||
* ResourceGroupsTaggingAPI: get_resources() now supports RDS Clusters and ClusterSnapshots
|
||||
* SSM now includes the parameters at /aws/service/ecs/optimized-ami
|
||||
|
||||
|
||||
4.1.3
|
||||
-----
|
||||
Docker Digest for 4.1.3: _sha256:3139cdae44d5da35d92d9e385cb878581eef8df3514cbda5cbc3e103488095a7_
|
||||
|
@ -1187,7 +1187,7 @@
|
||||
|
||||
## comprehend
|
||||
<details>
|
||||
<summary>10% implemented</summary>
|
||||
<summary>9% implemented</summary>
|
||||
|
||||
- [ ] batch_detect_dominant_language
|
||||
- [ ] batch_detect_entities
|
||||
@ -1197,13 +1197,17 @@
|
||||
- [ ] batch_detect_targeted_sentiment
|
||||
- [ ] classify_document
|
||||
- [ ] contains_pii_entities
|
||||
- [ ] create_dataset
|
||||
- [ ] create_document_classifier
|
||||
- [ ] create_endpoint
|
||||
- [X] create_entity_recognizer
|
||||
- [ ] create_flywheel
|
||||
- [ ] delete_document_classifier
|
||||
- [ ] delete_endpoint
|
||||
- [X] delete_entity_recognizer
|
||||
- [ ] delete_flywheel
|
||||
- [ ] delete_resource_policy
|
||||
- [ ] describe_dataset
|
||||
- [ ] describe_document_classification_job
|
||||
- [ ] describe_document_classifier
|
||||
- [ ] describe_dominant_language_detection_job
|
||||
@ -1211,6 +1215,8 @@
|
||||
- [ ] describe_entities_detection_job
|
||||
- [X] describe_entity_recognizer
|
||||
- [ ] describe_events_detection_job
|
||||
- [ ] describe_flywheel
|
||||
- [ ] describe_flywheel_iteration
|
||||
- [ ] describe_key_phrases_detection_job
|
||||
- [ ] describe_pii_entities_detection_job
|
||||
- [ ] describe_resource_policy
|
||||
@ -1225,6 +1231,7 @@
|
||||
- [ ] detect_syntax
|
||||
- [ ] detect_targeted_sentiment
|
||||
- [ ] import_model
|
||||
- [ ] list_datasets
|
||||
- [ ] list_document_classification_jobs
|
||||
- [ ] list_document_classifier_summaries
|
||||
- [ ] list_document_classifiers
|
||||
@ -1234,6 +1241,8 @@
|
||||
- [ ] list_entity_recognizer_summaries
|
||||
- [X] list_entity_recognizers
|
||||
- [ ] list_events_detection_jobs
|
||||
- [ ] list_flywheel_iteration_history
|
||||
- [ ] list_flywheels
|
||||
- [ ] list_key_phrases_detection_jobs
|
||||
- [ ] list_pii_entities_detection_jobs
|
||||
- [ ] list_sentiment_detection_jobs
|
||||
@ -1245,6 +1254,7 @@
|
||||
- [ ] start_dominant_language_detection_job
|
||||
- [ ] start_entities_detection_job
|
||||
- [ ] start_events_detection_job
|
||||
- [ ] start_flywheel_iteration
|
||||
- [ ] start_key_phrases_detection_job
|
||||
- [ ] start_pii_entities_detection_job
|
||||
- [ ] start_sentiment_detection_job
|
||||
@ -1262,6 +1272,7 @@
|
||||
- [X] tag_resource
|
||||
- [X] untag_resource
|
||||
- [ ] update_endpoint
|
||||
- [ ] update_flywheel
|
||||
</details>
|
||||
|
||||
## config
|
||||
@ -3101,7 +3112,7 @@
|
||||
|
||||
## glue
|
||||
<details>
|
||||
<summary>23% implemented</summary>
|
||||
<summary>25% implemented</summary>
|
||||
|
||||
- [X] batch_create_partition
|
||||
- [ ] batch_delete_connection
|
||||
@ -3134,7 +3145,7 @@
|
||||
- [ ] create_dev_endpoint
|
||||
- [X] create_job
|
||||
- [ ] create_ml_transform
|
||||
- [ ] create_partition
|
||||
- [X] create_partition
|
||||
- [ ] create_partition_index
|
||||
- [X] create_registry
|
||||
- [X] create_schema
|
||||
@ -3157,7 +3168,7 @@
|
||||
- [ ] delete_dev_endpoint
|
||||
- [ ] delete_job
|
||||
- [ ] delete_ml_transform
|
||||
- [ ] delete_partition
|
||||
- [X] delete_partition
|
||||
- [ ] delete_partition_index
|
||||
- [X] delete_registry
|
||||
- [ ] delete_resource_policy
|
||||
@ -3204,7 +3215,7 @@
|
||||
- [ ] get_ml_task_runs
|
||||
- [ ] get_ml_transform
|
||||
- [ ] get_ml_transforms
|
||||
- [ ] get_partition
|
||||
- [X] get_partition
|
||||
- [ ] get_partition_indexes
|
||||
- [X] get_partitions
|
||||
- [ ] get_plan
|
||||
@ -3297,7 +3308,7 @@
|
||||
- [ ] update_job
|
||||
- [ ] update_job_from_source_control
|
||||
- [ ] update_ml_transform
|
||||
- [ ] update_partition
|
||||
- [X] update_partition
|
||||
- [ ] update_registry
|
||||
- [X] update_schema
|
||||
- [ ] update_source_control_from_job
|
||||
@ -6618,18 +6629,22 @@
|
||||
|
||||
## timestream-write
|
||||
<details>
|
||||
<summary>100% implemented</summary>
|
||||
<summary>78% implemented</summary>
|
||||
|
||||
- [ ] create_batch_load_task
|
||||
- [X] create_database
|
||||
- [X] create_table
|
||||
- [X] delete_database
|
||||
- [X] delete_table
|
||||
- [ ] describe_batch_load_task
|
||||
- [X] describe_database
|
||||
- [X] describe_endpoints
|
||||
- [X] describe_table
|
||||
- [ ] list_batch_load_tasks
|
||||
- [X] list_databases
|
||||
- [X] list_tables
|
||||
- [X] list_tags_for_resource
|
||||
- [ ] resume_batch_load_task
|
||||
- [X] tag_resource
|
||||
- [X] untag_resource
|
||||
- [X] update_database
|
||||
@ -6830,6 +6845,7 @@
|
||||
- importexport
|
||||
- inspector
|
||||
- inspector2
|
||||
- internetmonitor
|
||||
- iot-jobs-data
|
||||
- iot-roborunner
|
||||
- iot1click-devices
|
||||
@ -6955,6 +6971,7 @@
|
||||
- support-app
|
||||
- synthetics
|
||||
- timestream-query
|
||||
- tnb
|
||||
- transfer
|
||||
- translate
|
||||
- voice-id
|
||||
|
@ -53,11 +53,11 @@ athena
|
||||
|
||||
Queries are not executed by Moto, so this call will always return 0 rows by default.
|
||||
|
||||
You can use a dedicated API to configure this. Moto has a queue that can be filled with the expected results.
|
||||
You can use a dedicated API to override this by configuring a queue of expected results.
|
||||
|
||||
A request to `get_query_results` will take the first result from that queue, and assign it to the provided QueryExecutionId. Subsequent requests using the same QueryExecutionId will return the same result. Other requests using a different QueryExecutionId will take the next result from the queue, or return an empty result if the queue is empty.
|
||||
|
||||
Configuring this queue by making a HTTP request to `/moto-api/static/athena/query-results`. An example invocation looks like this:
|
||||
Configure this queue by making an HTTP request to `/moto-api/static/athena/query-results`. An example invocation looks like this:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
|
@ -35,6 +35,7 @@ comprehend
|
||||
- [ ] batch_detect_targeted_sentiment
|
||||
- [ ] classify_document
|
||||
- [ ] contains_pii_entities
|
||||
- [ ] create_dataset
|
||||
- [ ] create_document_classifier
|
||||
- [ ] create_endpoint
|
||||
- [X] create_entity_recognizer
|
||||
@ -42,10 +43,13 @@ comprehend
|
||||
The ClientRequestToken-parameter is not yet implemented
|
||||
|
||||
|
||||
- [ ] create_flywheel
|
||||
- [ ] delete_document_classifier
|
||||
- [ ] delete_endpoint
|
||||
- [X] delete_entity_recognizer
|
||||
- [ ] delete_flywheel
|
||||
- [ ] delete_resource_policy
|
||||
- [ ] describe_dataset
|
||||
- [ ] describe_document_classification_job
|
||||
- [ ] describe_document_classifier
|
||||
- [ ] describe_dominant_language_detection_job
|
||||
@ -53,6 +57,8 @@ comprehend
|
||||
- [ ] describe_entities_detection_job
|
||||
- [X] describe_entity_recognizer
|
||||
- [ ] describe_events_detection_job
|
||||
- [ ] describe_flywheel
|
||||
- [ ] describe_flywheel_iteration
|
||||
- [ ] describe_key_phrases_detection_job
|
||||
- [ ] describe_pii_entities_detection_job
|
||||
- [ ] describe_resource_policy
|
||||
@ -67,6 +73,7 @@ comprehend
|
||||
- [ ] detect_syntax
|
||||
- [ ] detect_targeted_sentiment
|
||||
- [ ] import_model
|
||||
- [ ] list_datasets
|
||||
- [ ] list_document_classification_jobs
|
||||
- [ ] list_document_classifier_summaries
|
||||
- [ ] list_document_classifiers
|
||||
@ -81,6 +88,8 @@ comprehend
|
||||
|
||||
|
||||
- [ ] list_events_detection_jobs
|
||||
- [ ] list_flywheel_iteration_history
|
||||
- [ ] list_flywheels
|
||||
- [ ] list_key_phrases_detection_jobs
|
||||
- [ ] list_pii_entities_detection_jobs
|
||||
- [ ] list_sentiment_detection_jobs
|
||||
@ -92,6 +101,7 @@ comprehend
|
||||
- [ ] start_dominant_language_detection_job
|
||||
- [ ] start_entities_detection_job
|
||||
- [ ] start_events_detection_job
|
||||
- [ ] start_flywheel_iteration
|
||||
- [ ] start_key_phrases_detection_job
|
||||
- [ ] start_pii_entities_detection_job
|
||||
- [ ] start_sentiment_detection_job
|
||||
@ -109,4 +119,5 @@ comprehend
|
||||
- [X] tag_resource
|
||||
- [X] untag_resource
|
||||
- [ ] update_endpoint
|
||||
- [ ] update_flywheel
|
||||
|
||||
|
@ -56,7 +56,7 @@ glue
|
||||
- [ ] create_dev_endpoint
|
||||
- [X] create_job
|
||||
- [ ] create_ml_transform
|
||||
- [ ] create_partition
|
||||
- [X] create_partition
|
||||
- [ ] create_partition_index
|
||||
- [X] create_registry
|
||||
- [X] create_schema
|
||||
@ -83,7 +83,7 @@ glue
|
||||
- [ ] delete_dev_endpoint
|
||||
- [ ] delete_job
|
||||
- [ ] delete_ml_transform
|
||||
- [ ] delete_partition
|
||||
- [X] delete_partition
|
||||
- [ ] delete_partition_index
|
||||
- [X] delete_registry
|
||||
- [ ] delete_resource_policy
|
||||
@ -130,7 +130,7 @@ glue
|
||||
- [ ] get_ml_task_runs
|
||||
- [ ] get_ml_transform
|
||||
- [ ] get_ml_transforms
|
||||
- [ ] get_partition
|
||||
- [X] get_partition
|
||||
- [ ] get_partition_indexes
|
||||
- [X] get_partitions
|
||||
|
||||
@ -235,7 +235,7 @@ glue
|
||||
- [ ] update_job
|
||||
- [ ] update_job_from_source_control
|
||||
- [ ] update_ml_transform
|
||||
- [ ] update_partition
|
||||
- [X] update_partition
|
||||
- [ ] update_registry
|
||||
- [X] update_schema
|
||||
|
||||
|
@ -27,16 +27,20 @@ timestream-write
|
||||
|
||||
|start-h3| Implemented features for this service |end-h3|
|
||||
|
||||
- [ ] create_batch_load_task
|
||||
- [X] create_database
|
||||
- [X] create_table
|
||||
- [X] delete_database
|
||||
- [X] delete_table
|
||||
- [ ] describe_batch_load_task
|
||||
- [X] describe_database
|
||||
- [X] describe_endpoints
|
||||
- [X] describe_table
|
||||
- [ ] list_batch_load_tasks
|
||||
- [X] list_databases
|
||||
- [X] list_tables
|
||||
- [X] list_tags_for_resource
|
||||
- [ ] resume_batch_load_task
|
||||
- [X] tag_resource
|
||||
- [X] untag_resource
|
||||
- [X] update_database
|
||||
|
@ -198,11 +198,11 @@ class AthenaBackend(BaseBackend):
|
||||
"""
|
||||
Queries are not executed by Moto, so this call will always return 0 rows by default.
|
||||
|
||||
You can use a dedicated API to configure this. Moto has a queue that can be filled with the expected results.
|
||||
You can use a dedicated API to override this by configuring a queue of expected results.
|
||||
|
||||
A request to `get_query_results` will take the first result from that queue, and assign it to the provided QueryExecutionId. Subsequent requests using the same QueryExecutionId will return the same result. Other requests using a different QueryExecutionId will take the next result from the queue, or return an empty result if the queue is empty.
|
||||
|
||||
Configuring this queue by making a HTTP request to `/moto-api/static/athena/query-results`. An example invocation looks like this:
|
||||
Configure this queue by making an HTTP request to `/moto-api/static/athena/query-results`. An example invocation looks like this:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
|
@ -202,6 +202,14 @@ class GlueBackend(BaseBackend):
|
||||
table = self.get_table(database_name, table_name)
|
||||
table.delete_version(version_id)
|
||||
|
||||
def create_partition(self, database_name: str, table_name: str, part_input) -> None:
|
||||
table = self.get_table(database_name, table_name)
|
||||
table.create_partition(part_input)
|
||||
|
||||
def get_partition(self, database_name: str, table_name: str, values):
|
||||
table = self.get_table(database_name, table_name)
|
||||
return table.get_partition(values)
|
||||
|
||||
def get_partitions(self, database_name, table_name, expression):
|
||||
"""
|
||||
See https://docs.aws.amazon.com/glue/latest/webapi/API_GetPartitions.html
|
||||
@ -217,6 +225,18 @@ class GlueBackend(BaseBackend):
|
||||
table = self.get_table(database_name, table_name)
|
||||
return table.get_partitions(expression)
|
||||
|
||||
def update_partition(
|
||||
self, database_name, table_name, part_input, part_to_update
|
||||
) -> None:
|
||||
table = self.get_table(database_name, table_name)
|
||||
table.update_partition(part_to_update, part_input)
|
||||
|
||||
def delete_partition(
|
||||
self, database_name: str, table_name: str, part_to_delete
|
||||
) -> None:
|
||||
table = self.get_table(database_name, table_name)
|
||||
table.delete_partition(part_to_delete)
|
||||
|
||||
def create_crawler(
|
||||
self,
|
||||
name,
|
||||
|
@ -134,9 +134,7 @@ class GlueResponse(BaseResponse):
|
||||
table_name = self.parameters.get("TableName")
|
||||
values = self.parameters.get("PartitionValues")
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
|
||||
p = table.get_partition(values)
|
||||
p = self.glue_backend.get_partition(database_name, table_name, values)
|
||||
|
||||
return json.dumps({"Partition": p.as_dict()})
|
||||
|
||||
@ -156,9 +154,7 @@ class GlueResponse(BaseResponse):
|
||||
table_name = self.parameters.get("TableName")
|
||||
part_input = self.parameters.get("PartitionInput")
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.create_partition(part_input)
|
||||
|
||||
self.glue_backend.create_partition(database_name, table_name, part_input)
|
||||
return ""
|
||||
|
||||
def batch_create_partition(self):
|
||||
@ -181,9 +177,9 @@ class GlueResponse(BaseResponse):
|
||||
part_input = self.parameters.get("PartitionInput")
|
||||
part_to_update = self.parameters.get("PartitionValueList")
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.update_partition(part_to_update, part_input)
|
||||
|
||||
self.glue_backend.update_partition(
|
||||
database_name, table_name, part_input, part_to_update
|
||||
)
|
||||
return ""
|
||||
|
||||
def batch_update_partition(self):
|
||||
@ -206,9 +202,7 @@ class GlueResponse(BaseResponse):
|
||||
table_name = self.parameters.get("TableName")
|
||||
part_to_delete = self.parameters.get("PartitionValues")
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.delete_partition(part_to_delete)
|
||||
|
||||
self.glue_backend.delete_partition(database_name, table_name, part_to_delete)
|
||||
return ""
|
||||
|
||||
def batch_delete_partition(self):
|
||||
|
Loading…
Reference in New Issue
Block a user