Prep release 4.0.13 (#5813)
parent d68fb04ee1
commit 2e08c321d9
CHANGELOG.md
@@ -1,6 +1,26 @@
Moto Changelog
==============

4.0.13
-----
Docker Digest for 4.0.13: <autopopulateddigest>

New Methods:
* EC2:
    * get_password_data()
* Sagemaker:
    * update_pipeline()
* SecretsManager:
    * cancel_rotate_secret()

Miscellaneous:
* CloudWatch: put_metric_data() now supports the StatisticValues-parameter
* CognitoIDP: sign_out() now also invalidates the AccessToken
* IAM: get_account_authorization_details() now returns the Tags-attribute
* IOT: create_keys_and_certificate() now creates valid certificates, instead of random data


4.0.12
-----
Docker Digest for 4.0.12: _sha256:06916d3f310c68fd445468f06d6d4ae6f855e7f2b80e007a90bd11eeb421b5ed_
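A minimal sketch of exercising one of the 4.0.13 changes listed above: the new StatisticValues support in CloudWatch's put_metric_data(). The namespace and metric name are illustrative, and dummy AWS credentials are assumed to be configured for boto3.

```python
import boto3
from moto import mock_cloudwatch


@mock_cloudwatch
def test_put_metric_data_with_statistic_values():
    # Every CloudWatch call inside this function is served by Moto's in-memory backend.
    client = boto3.client("cloudwatch", region_name="us-east-1")
    client.put_metric_data(
        Namespace="my-namespace",  # illustrative namespace
        MetricData=[
            {
                "MetricName": "Latency",
                "StatisticValues": {
                    "SampleCount": 2.0,
                    "Sum": 30.0,
                    "Minimum": 10.0,
                    "Maximum": 20.0,
                },
                "Unit": "Milliseconds",
            }
        ],
    )
    # The aggregated metric should now be listed by the mocked backend.
    metrics = client.list_metrics(Namespace="my-namespace")["Metrics"]
    assert any(m["MetricName"] == "Latency" for m in metrics)
```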
@@ -2160,7 +2160,7 @@
- [X] get_managed_prefix_list_entries
- [ ] get_network_insights_access_scope_analysis_findings
- [ ] get_network_insights_access_scope_content
-- [x] get_password_data
+- [X] get_password_data
- [ ] get_reserved_instances_exchange_quote
- [ ] get_serial_console_access_status
- [ ] get_spot_placement_scores
@@ -2731,7 +2731,7 @@

## emr
<details>
-<summary>42% implemented</summary>
+<summary>41% implemented</summary>

- [ ] add_instance_fleet
- [X] add_instance_groups
@@ -2753,6 +2753,7 @@
- [ ] describe_studio
- [ ] get_auto_termination_policy
- [ ] get_block_public_access_configuration
- [ ] get_cluster_session_credentials
- [ ] get_managed_scaling_policy
- [ ] get_studio_session_mapping
- [X] list_bootstrap_actions
@@ -3924,7 +3925,7 @@

## kinesisvideo
<details>
-<summary>20% implemented</summary>
+<summary>17% implemented</summary>

- [ ] create_signaling_channel
- [X] create_stream
@@ -3932,6 +3933,8 @@
- [X] delete_stream
- [ ] describe_edge_configuration
- [ ] describe_image_generation_configuration
- [ ] describe_mapped_resource_configuration
- [ ] describe_media_storage_configuration
- [ ] describe_notification_configuration
- [ ] describe_signaling_channel
- [X] describe_stream
@@ -3948,6 +3951,7 @@
- [ ] untag_stream
- [ ] update_data_retention
- [ ] update_image_generation_configuration
- [ ] update_media_storage_configuration
- [ ] update_notification_configuration
- [ ] update_signaling_channel
- [ ] update_stream
@@ -5766,6 +5770,7 @@
- [ ] import_hub_content
- [ ] list_actions
- [ ] list_algorithms
- [ ] list_aliases
- [ ] list_app_image_configs
- [ ] list_apps
- [ ] list_artifacts
@@ -5876,6 +5881,7 @@
- [ ] update_feature_metadata
- [ ] update_hub
- [ ] update_image
- [ ] update_image_version
- [ ] update_inference_experiment
- [ ] update_model_card
- [ ] update_model_package
@@ -5883,7 +5889,7 @@
- [ ] update_monitoring_schedule
- [ ] update_notebook_instance
- [ ] update_notebook_instance_lifecycle_config
-- [ ] update_pipeline
+- [X] update_pipeline
- [ ] update_pipeline_execution
- [ ] update_project
- [ ] update_space
@@ -5913,9 +5919,9 @@

## secretsmanager
<details>
-<summary>68% implemented</summary>
+<summary>72% implemented</summary>

-- [ ] cancel_rotate_secret
+- [X] cancel_rotate_secret
- [X] create_secret
- [ ] delete_resource_policy
- [X] delete_secret
@@ -6721,6 +6727,7 @@
- keyspaces
- kinesis-video-media
- kinesis-video-signaling
- kinesis-video-webrtc-storage
- kinesisanalytics
- kinesisanalyticsv2
- lakeformation
@@ -6729,6 +6736,7 @@
- lexv2-models
- lexv2-runtime
- license-manager
- license-manager-linux-subscriptions
- license-manager-user-subscriptions
- lightsail
- location
@@ -6835,4 +6843,4 @@
- workspaces
- workspaces-web
- xray
</details>
</details>
@@ -33,14 +33,14 @@ For example, we have the following code we want to test:

    import boto3

-    class MyModel(object):
+    class MyModel:
        def __init__(self, name, value):
            self.name = name
            self.value = value

        def save(self):
-            s3 = boto3.client('s3', region_name='us-east-1')
-            s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)
+            s3 = boto3.client("s3", region_name="us-east-1")
+            s3.put_object(Bucket="mybucket", Key=self.name, Body=self.value)

There are several ways to verify that the value will be persisted successfully.
@@ -57,17 +57,17 @@ With a decorator wrapping, all the calls to S3 are automatically mocked out.

    @mock_s3
    def test_my_model_save():
-        conn = boto3.resource('s3', region_name='us-east-1')
+        conn = boto3.resource("s3", region_name="us-east-1")
        # We need to create the bucket since this is all in Moto's 'virtual' AWS account
-        conn.create_bucket(Bucket='mybucket')
+        conn.create_bucket(Bucket="mybucket")

-        model_instance = MyModel('steve', 'is awesome')
+        model_instance = MyModel("steve", "is awesome")
        model_instance.save()

-        body = conn.Object('mybucket', 'steve').get()[
-            'Body'].read().decode("utf-8")
+        body = conn.Object("mybucket", "steve").get()[
+            "Body"].read().decode("utf-8")

-        assert body == 'is awesome'
+        assert body == "is awesome"

Context manager
~~~~~~~~~~~~~~~
@@ -78,16 +78,16 @@ Same as the Decorator, every call inside the ``with`` statement is mocked out.

    def test_my_model_save():
        with mock_s3():
-            conn = boto3.resource('s3', region_name='us-east-1')
-            conn.create_bucket(Bucket='mybucket')
+            conn = boto3.resource("s3", region_name="us-east-1")
+            conn.create_bucket(Bucket="mybucket")

-            model_instance = MyModel('steve', 'is awesome')
+            model_instance = MyModel("steve", "is awesome")
            model_instance.save()

-            body = conn.Object('mybucket', 'steve').get()[
-                'Body'].read().decode("utf-8")
+            body = conn.Object("mybucket", "steve").get()[
+                "Body"].read().decode("utf-8")

-            assert body == 'is awesome'
+            assert body == "is awesome"

Raw
~~~
@@ -100,16 +100,16 @@ You can also start and stop the mocking manually.

    mock = mock_s3()
    mock.start()

-    conn = boto3.resource('s3', region_name='us-east-1')
-    conn.create_bucket(Bucket='mybucket')
+    conn = boto3.resource("s3", region_name="us-east-1")
+    conn.create_bucket(Bucket="mybucket")

-    model_instance = MyModel('steve', 'is awesome')
+    model_instance = MyModel("steve", "is awesome")
    model_instance.save()

-    body = conn.Object('mybucket', 'steve').get()[
-        'Body'].read().decode("utf-8")
+    body = conn.Object("mybucket", "steve").get()[
+        "Body"].read().decode("utf-8")

-    assert body == 'is awesome'
+    assert body == "is awesome"

    mock.stop()
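A minimal companion sketch for the raw pattern above (it reuses mock_s3 and boto3 exactly as in that example): wrapping the body in try/finally guarantees mock.stop() runs even when an assertion fails, so mocked state cannot leak into later tests.

```python
import boto3
from moto import mock_s3

mock = mock_s3()
mock.start()
try:
    conn = boto3.resource("s3", region_name="us-east-1")
    conn.create_bucket(Bucket="mybucket")
    # ... exercise the code under test and make assertions here ...
finally:
    # Always stop the mock, even when an assertion above raises.
    mock.stop()
```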
@@ -125,18 +125,18 @@ If you use `unittest`_ to run tests, and you want to use `moto` inside `setUp`,

    import boto3

    def func_to_test(bucket_name, key, content):
-        s3 = boto3.resource('s3')
+        s3 = boto3.resource("s3")
        object = s3.Object(bucket_name, key)
        object.put(Body=content)

    class MyTest(unittest.TestCase):
        mock_s3 = mock_s3()
-        bucket_name = 'test-bucket'
+        bucket_name = "test-bucket"

        def setUp(self):
            self.mock_s3.start()

-            # you can use boto3.client('s3') if you prefer
-            s3 = boto3.resource('s3')
+            # you can use boto3.client("s3") if you prefer
+            s3 = boto3.resource("s3")
            bucket = s3.Bucket(self.bucket_name)
            bucket.create()
@@ -145,15 +145,15 @@ If you use `unittest`_ to run tests, and you want to use `moto` inside `setUp`,

        def test(self):
            content = b"abc"
-            key = '/path/to/obj'
+            key = "/path/to/obj"

            # run the file which uploads to S3
            func_to_test(self.bucket_name, key, content)

            # check the file was uploaded as expected
-            s3 = boto3.resource('s3')
+            s3 = boto3.resource("s3")
            object = s3.Object(self.bucket_name, key)
-            actual = object.get()['Body'].read()
+            actual = object.get()["Body"].read()
            self.assertEqual(actual, content)

Class Decorator
@@ -231,19 +231,19 @@ Here is an example:

.. sourcecode:: python

-    @pytest.fixture(scope='function')
+    @pytest.fixture(scope="function")
    def aws_credentials():
        """Mocked AWS Credentials for moto."""
-        os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
-        os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
-        os.environ['AWS_SECURITY_TOKEN'] = 'testing'
-        os.environ['AWS_SESSION_TOKEN'] = 'testing'
-        os.environ['AWS_DEFAULT_REGION'] = 'us-east-1'
+        os.environ["AWS_ACCESS_KEY_ID"] = "testing"
+        os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
+        os.environ["AWS_SECURITY_TOKEN"] = "testing"
+        os.environ["AWS_SESSION_TOKEN"] = "testing"
+        os.environ["AWS_DEFAULT_REGION"] = "us-east-1"

-    @pytest.fixture(scope='function')
+    @pytest.fixture(scope="function")
    def s3(aws_credentials):
        with mock_s3():
-            yield boto3.client('s3', region_name='us-east-1')
+            yield boto3.client("s3", region_name="us-east-1")


In the code sample above, all of the AWS/mocked fixtures take in a parameter of `aws_credentials`,
@@ -260,8 +260,8 @@ Next, once you need to do anything with the mocked AWS environment, do something like:

        s3.create_bucket(Bucket="somebucket")

        result = s3.list_buckets()
-        assert len(result['Buckets']) == 1
-        assert result['Buckets'][0]['Name'] == 'somebucket'
+        assert len(result["Buckets"]) == 1
+        assert result["Buckets"][0]["Name"] == "somebucket"

What about those pesky imports
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -478,7 +478,7 @@ ec2
- [X] get_managed_prefix_list_entries
- [ ] get_network_insights_access_scope_analysis_findings
- [ ] get_network_insights_access_scope_content
-- [x] get_password_data
+- [X] get_password_data
- [ ] get_reserved_instances_exchange_quote
- [ ] get_serial_console_access_status
- [ ] get_spot_placement_scores
@@ -45,6 +45,7 @@ emr
- [ ] describe_studio
- [ ] get_auto_termination_policy
- [ ] get_block_public_access_configuration
- [ ] get_cluster_session_credentials
- [ ] get_managed_scaling_policy
- [ ] get_studio_session_mapping
- [X] list_bootstrap_actions
@@ -35,6 +35,8 @@ kinesisvideo

- [ ] describe_edge_configuration
- [ ] describe_image_generation_configuration
- [ ] describe_mapped_resource_configuration
- [ ] describe_media_storage_configuration
- [ ] describe_notification_configuration
- [ ] describe_signaling_channel
- [X] describe_stream
@@ -55,6 +57,7 @@ kinesisvideo
- [ ] untag_stream
- [ ] update_data_retention
- [ ] update_image_generation_configuration
- [ ] update_media_storage_configuration
- [ ] update_notification_configuration
- [ ] update_signaling_channel
- [ ] update_stream
@@ -196,6 +196,7 @@ sagemaker
- [ ] import_hub_content
- [ ] list_actions
- [ ] list_algorithms
- [ ] list_aliases
- [ ] list_app_image_configs
- [ ] list_apps
- [ ] list_artifacts
@@ -306,6 +307,7 @@ sagemaker
- [ ] update_feature_metadata
- [ ] update_hub
- [ ] update_image
- [ ] update_image_version
- [ ] update_inference_experiment
- [ ] update_model_card
- [ ] update_model_package
@@ -25,7 +25,7 @@ secretsmanager

|start-h3| Implemented features for this service |end-h3|

-- [ ] cancel_rotate_secret
+- [X] cancel_rotate_secret
- [X] create_secret
- [ ] delete_resource_policy
- [X] delete_secret
@@ -38,6 +38,8 @@ class IoTDataPlaneResponse(BaseResponse):

    def publish(self):
        topic = self.path.split("/topics/")[-1]
        # a uri parameter containing forward slashes is not correctly url encoded when we're running in server mode.
        # https://github.com/pallets/flask/issues/900
        topic = unquote(topic) if "%" in topic else topic
        self.iotdata_backend.publish(topic=topic, payload=self.body)
        return json.dumps(dict())
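A minimal sketch of the call path that the unquote() handling above serves: publishing to a topic containing forward slashes through Moto's IoT Data mock. The topic name and payload are illustrative, and dummy AWS credentials are assumed to be configured.

```python
import boto3
from moto import mock_iotdata


@mock_iotdata
def test_publish_to_nested_topic():
    client = boto3.client("iot-data", region_name="us-east-1")
    # MQTT-style topics routinely contain "/" separators; in server mode the raw
    # request path is what IoTDataPlaneResponse.publish() unquotes before dispatching.
    client.publish(topic="device/42/telemetry", qos=1, payload=b'{"temperature": 21}')
```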