Prep release 2.2.6 (#4238)

parent 728c0c91b4
commit 67ec21eef8

.github/workflows/build.yml (vendored): 1 line changed
@@ -296,6 +296,7 @@ jobs:
          bin/create-report-cli
          cd ..
      - name: Archive TF logs
        if: always()
        uses: actions/upload-artifact@v2
        with:
          name: buildfolder-${{ matrix.part }}

CHANGELOG.md: 45 lines changed
@@ -4,11 +4,56 @@ Moto Changelog

Unreleased
-----


2.2.6
-----
General:
    * `pip install` will no longer log a warning when installing a service that does not have any dependencies
      Example: `pip install moto[acm]`

New Services:
    ElasticTranscoder:
        * create_pipeline
        * delete_pipeline
        * list_pipelines
        * read_pipeline
        * update_pipeline

New Methods:
    * DynamoDB:
        * describe_endpoints()

Miscellaneous:
    * AWSLambda now sends logs to CloudWatch when Docker encounters an error, to make debugging easier
    * AWSLambda: For all methods, the FunctionName-parameter can be either the Lambda name or the Lambda ARN
    * AWSLambda:list_functions() now returns only the latest version by default
    * AWSLambda:invoke() now returns the correct Payload for invocations that resulted in an error
    * CloudFormation now supports the creation of type AWS::IAM::ManagedPolicy
    * CloudFormation now supports the deletion of type AWS::IAM::InstanceProfile
    * CloudFormation now supports the deletion of type AWS::IAM::Role
    * CloudWatch:create_log_group() now has proper validation for the length of the logGroupName-parameter
    * CloudWatch:describe_log_groups() now has proper validation for the limit-parameter
    * CloudWatch:describe_log_streams() now has proper validation for the limit-parameter
    * CloudWatch:get_log_events() now has proper validation for the limit-parameter
    * CloudWatch:filter_log_events() now has proper validation for the limit-parameter
    * DynamoDB:update_item(): fixed a bug where an item was created even though the call threw an error
    * DynamoDB:update_item() now throws an error when both UpdateExpression and AttributeUpdates are supplied
    * EC2:modify_instance_attribute() now supports Attribute="disableApiTermination"
    * S3 now supports direct uploads using the requests-library, without having to specify the 'Content-Type' header
    * S3 now supports creating buckets whose names start with a service name, e.g. `iot-bucket`
    * S3 now returns the RequestID in every response
    * S3:list_parts() now supports the MaxParts-parameter
    * SQS:get_queue_attributes() now behaves correctly when the AttributeNames-parameter is not provided
    * SQS:receive_message() no longer accepts queue names for the QueueUrl-parameter, as per AWS' spec
    * SQS: the sqs.Queue class no longer accepts queue names, only queue URLs, as per AWS' spec


2.2.5
-----
General:
    * Python 3.9 is now officially supported

Known bugs:
    * SQS:get_queue_attributes() throws an error when the AttributeNames-parameter is not provided

New Methods:
    * DynamoDB (API v20111205, now deprecated)
        * UpdateItem
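For illustration only (not part of this diff): a minimal sketch of exercising the newly mocked DynamoDB `describe_endpoints()` listed above, assuming moto's `mock_dynamodb2` decorator and boto3; the region and assertions are illustrative rather than taken from this commit.

```python
# Sketch of the new DynamoDB describe_endpoints() mock from the 2.2.6 changelog.
# Assumes moto >= 2.2.6 and boto3; credentials and region are fake/arbitrary.
import boto3
from moto import mock_dynamodb2


@mock_dynamodb2
def test_describe_endpoints():
    client = boto3.client("dynamodb", region_name="us-east-1")
    endpoints = client.describe_endpoints()["Endpoints"]
    # Each endpoint entry is expected to carry at least an Address.
    assert len(endpoints) >= 1
    assert "Address" in endpoints[0]


if __name__ == "__main__":
    test_describe_endpoints()
    print("describe_endpoints() is mocked")
```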
IMPLEMENTATION_COVERAGE.md

@@ -3329,7 +3329,7 @@

## dynamodb
<details>
-<summary>54% implemented</summary>
+<summary>56% implemented</summary>

- [ ] batch_execute_statement
- [X] batch_get_item
@@ -3343,7 +3343,7 @@
- [X] describe_backup
- [X] describe_continuous_backups
- [ ] describe_contributor_insights
-- [ ] describe_endpoints
+- [X] describe_endpoints
- [ ] describe_export
- [ ] describe_global_table
- [ ] describe_global_table_settings
@@ -4318,7 +4318,7 @@

## emr
<details>
-<summary>42% implemented</summary>
+<summary>40% implemented</summary>

- [ ] add_instance_fleet
- [X] add_instance_groups
@@ -4338,6 +4338,7 @@
- [ ] describe_security_configuration
- [X] describe_step
- [ ] describe_studio
+- [ ] get_auto_termination_policy
- [ ] get_block_public_access_configuration
- [ ] get_managed_scaling_policy
- [ ] get_studio_session_mapping
@@ -4356,9 +4357,11 @@
- [ ] modify_instance_fleet
- [X] modify_instance_groups
- [X] put_auto_scaling_policy
+- [ ] put_auto_termination_policy
- [ ] put_block_public_access_configuration
- [ ] put_managed_scaling_policy
- [X] remove_auto_scaling_policy
+- [ ] remove_auto_termination_policy
- [ ] remove_managed_scaling_policy
- [X] remove_tags
- [X] run_job_flow
@@ -9419,7 +9422,7 @@

## s3
<details>
-<summary>52% implemented</summary>
+<summary>55% implemented</summary>

- [X] abort_multipart_upload
- [X] complete_multipart_upload
@@ -9482,9 +9485,6 @@
- [X] list_buckets
- [ ] list_multipart_uploads
- [X] list_object_versions
-- [ ] list_objects
-- [ ] list_objects_v2
-- [ ] list_parts
- [X] list_objects
- [X] list_objects_v2
- [X] list_parts
moto/s3/models.py

@@ -1714,7 +1714,7 @@ class S3Backend(BaseBackend):
            raise NoSuchUpload(upload_id=multipart_id)
        del bucket.multiparts[multipart_id]

-    def list_multipart(
+    def list_parts(
        self, bucket_name, multipart_id, part_number_marker=0, max_parts=1000
    ):
        bucket = self.get_bucket(bucket_name)
@@ -1774,7 +1774,7 @@ class S3Backend(BaseBackend):
        src_value = src_value[start_byte : end_byte + 1]
        return multipart.set_part(part_id, src_value)

-    def prefix_query(self, bucket, prefix, delimiter):
+    def list_objects(self, bucket, prefix, delimiter):
        key_results = set()
        folder_results = set()
        if prefix:
@@ -1807,6 +1807,20 @@ class S3Backend(BaseBackend):

        return key_results, folder_results

+    def list_objects_v2(self, bucket, prefix, delimiter):
+        result_keys, result_folders = self.list_objects(bucket, prefix, delimiter)
+        # sort the combination of folders and keys into lexicographical order
+        all_keys = result_keys + result_folders
+        all_keys.sort(key=self._get_name)
+        return all_keys
+
+    @staticmethod
+    def _get_name(key):
+        if isinstance(key, FakeKey):
+            return key.name
+        else:
+            return key
+
    def _set_delete_marker(self, bucket_name, key_name):
        bucket = self.get_bucket(bucket_name)
        delete_marker = FakeDeleteMarker(key=bucket.keys[key_name])
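A short sketch (not from this commit) of the behaviour the new `list_objects_v2` backend method provides when seen through boto3 under `mock_s3`; the bucket and key names below are illustrative.

```python
# Sketch: keys and folder prefixes come back in lexicographical order from the
# mocked ListObjectsV2, matching the sorting done in S3Backend.list_objects_v2.
import boto3
from moto import mock_s3


@mock_s3
def demo_listing_order():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="demo-bucket")
    for key in ["b.txt", "a-folder/file.txt", "c.txt"]:
        s3.put_object(Bucket="demo-bucket", Key=key, Body=b"data")

    resp = s3.list_objects_v2(Bucket="demo-bucket", Delimiter="/")
    keys = [obj["Key"] for obj in resp.get("Contents", [])]
    prefixes = [p["Prefix"] for p in resp.get("CommonPrefixes", [])]
    print(keys)      # ['b.txt', 'c.txt']
    print(prefixes)  # ['a-folder/']


if __name__ == "__main__":
    demo_listing_order()
```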
moto/s3/responses.py

@@ -522,7 +522,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        delimiter = querystring.get("delimiter", [None])[0]
        max_keys = int(querystring.get("max-keys", [1000])[0])
        marker = querystring.get("marker", [None])[0]
-        result_keys, result_folders = self.backend.prefix_query(
+        result_keys, result_folders = self.backend.list_objects(
            bucket, prefix, delimiter
        )

@@ -572,18 +572,12 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        if prefix and isinstance(prefix, bytes):
            prefix = prefix.decode("utf-8")
        delimiter = querystring.get("delimiter", [None])[0]
-        result_keys, result_folders = self.backend.prefix_query(
-            bucket, prefix, delimiter
-        )
+        all_keys = self.backend.list_objects_v2(bucket, prefix, delimiter)

        fetch_owner = querystring.get("fetch-owner", [False])[0]
        max_keys = int(querystring.get("max-keys", [1000])[0])
        start_after = querystring.get("start-after", [None])[0]

-        # sort the combination of folders and keys into lexicographical order
-        all_keys = result_keys + result_folders
-        all_keys.sort(key=self._get_name)
-
        if continuation_token or start_after:
            limit = continuation_token or start_after
            all_keys = self._get_results_from_token(all_keys, limit)
@@ -609,13 +603,6 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
            start_after=None if continuation_token else start_after,
        )

-    @staticmethod
-    def _get_name(key):
-        if isinstance(key, FakeKey):
-            return key.name
-        else:
-            return key
-
    @staticmethod
    def _split_truncated_keys(truncated_keys):
        result_keys = []
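For illustration (not part of the diff): a sketch of paging through the mocked ListObjectsV2, which is the `continuation-token` / `start-after` handling served by the hunk above; bucket and key names are assumptions, and the exact token values are left to moto.

```python
# Sketch: paging with MaxKeys and ContinuationToken against mock_s3.
import boto3
from moto import mock_s3


@mock_s3
def demo_pagination():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="demo-bucket")
    for i in range(5):
        s3.put_object(Bucket="demo-bucket", Key=f"key-{i}", Body=b"x")

    page = s3.list_objects_v2(Bucket="demo-bucket", MaxKeys=2)
    seen = [obj["Key"] for obj in page["Contents"]]
    while page.get("IsTruncated"):
        page = s3.list_objects_v2(
            Bucket="demo-bucket",
            MaxKeys=2,
            ContinuationToken=page["NextContinuationToken"],
        )
        seen.extend(obj["Key"] for obj in page["Contents"])
    # All keys are eventually returned; ordering/token details are moto's.
    assert set(seen) == {f"key-{i}" for i in range(5)}


if __name__ == "__main__":
    demo_pagination()
    print("paged through all keys")
```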
@@ -1255,7 +1242,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
        if not (0 <= max_parts <= 2147483647):
            raise InvalidMaxPartArgument("max-parts", 0, 2147483647)

-        parts = self.backend.list_multipart(
+        parts = self.backend.list_parts(
            bucket_name,
            upload_id,
            part_number_marker=part_number_marker,
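Finally, a hedged sketch (not from this commit) of the ListParts behaviour that the renamed backend method and the changelog's MaxParts bullet describe; bucket, key, and part bodies are illustrative.

```python
# Sketch: the mocked ListParts honours MaxParts via S3Backend.list_parts.
import boto3
from moto import mock_s3


@mock_s3
def demo_list_parts():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="demo-bucket")
    mpu = s3.create_multipart_upload(Bucket="demo-bucket", Key="big-object")
    upload_id = mpu["UploadId"]
    for part_number in (1, 2, 3):
        # Small bodies are fine here; S3's 5 MiB minimum only applies on complete.
        s3.upload_part(
            Bucket="demo-bucket",
            Key="big-object",
            UploadId=upload_id,
            PartNumber=part_number,
            Body=b"part-data",
        )

    resp = s3.list_parts(
        Bucket="demo-bucket", Key="big-object", UploadId=upload_id, MaxParts=2
    )
    assert [p["PartNumber"] for p in resp["Parts"]] == [1, 2]


if __name__ == "__main__":
    demo_list_parts()
    print("list_parts honours MaxParts")
```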