S3/DynamoDB: Improve PartiQL tests (#6897)

This commit is contained in:
Bert Blommers 2023-10-10 15:44:00 +00:00 committed by GitHub
parent 5cd288b42c
commit 8ea041029a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 336 additions and 209 deletions

View File

@ -42,4 +42,4 @@ jobs:
env: env:
MOTO_TEST_ALLOW_AWS_REQUEST: ${{ true }} MOTO_TEST_ALLOW_AWS_REQUEST: ${{ true }}
run: | run: |
pytest -sv tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified pytest -sv tests/test_dynamodb/ tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified

View File

@ -52,7 +52,7 @@ all =
openapi-spec-validator>=0.2.8 openapi-spec-validator>=0.2.8
pyparsing>=3.0.7 pyparsing>=3.0.7
jsondiff>=1.1.2 jsondiff>=1.1.2
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
aws-xray-sdk!=0.96,>=0.93 aws-xray-sdk!=0.96,>=0.93
setuptools setuptools
multipart multipart
@ -67,7 +67,7 @@ proxy =
openapi-spec-validator>=0.2.8 openapi-spec-validator>=0.2.8
pyparsing>=3.0.7 pyparsing>=3.0.7
jsondiff>=1.1.2 jsondiff>=1.1.2
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
aws-xray-sdk!=0.96,>=0.93 aws-xray-sdk!=0.96,>=0.93
setuptools setuptools
multipart multipart
@ -82,7 +82,7 @@ server =
openapi-spec-validator>=0.2.8 openapi-spec-validator>=0.2.8
pyparsing>=3.0.7 pyparsing>=3.0.7
jsondiff>=1.1.2 jsondiff>=1.1.2
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
aws-xray-sdk!=0.96,>=0.93 aws-xray-sdk!=0.96,>=0.93
setuptools setuptools
flask!=2.2.0,!=2.2.1 flask!=2.2.0,!=2.2.1
@ -116,7 +116,7 @@ cloudformation =
openapi-spec-validator>=0.2.8 openapi-spec-validator>=0.2.8
pyparsing>=3.0.7 pyparsing>=3.0.7
jsondiff>=1.1.2 jsondiff>=1.1.2
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
aws-xray-sdk!=0.96,>=0.93 aws-xray-sdk!=0.96,>=0.93
setuptools setuptools
cloudfront = cloudfront =
@ -139,10 +139,10 @@ dms =
ds = sshpubkeys>=3.1.0 ds = sshpubkeys>=3.1.0
dynamodb = dynamodb =
docker>=3.0.0 docker>=3.0.0
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
dynamodbstreams = dynamodbstreams =
docker>=3.0.0 docker>=3.0.0
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
ebs = sshpubkeys>=3.1.0 ebs = sshpubkeys>=3.1.0
ec2 = sshpubkeys>=3.1.0 ec2 = sshpubkeys>=3.1.0
ec2instanceconnect = ec2instanceconnect =
@ -205,15 +205,15 @@ resourcegroupstaggingapi =
openapi-spec-validator>=0.2.8 openapi-spec-validator>=0.2.8
pyparsing>=3.0.7 pyparsing>=3.0.7
jsondiff>=1.1.2 jsondiff>=1.1.2
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
route53 = route53 =
route53resolver = sshpubkeys>=3.1.0 route53resolver = sshpubkeys>=3.1.0
s3 = s3 =
PyYAML>=5.1 PyYAML>=5.1
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
s3crc32c = s3crc32c =
PyYAML>=5.1 PyYAML>=5.1
py-partiql-parser==0.3.7 py-partiql-parser==0.3.8
crc32c crc32c
s3control = s3control =
sagemaker = sagemaker =

View File

@ -1 +1,57 @@
# This file is intentionally left blank. import boto3
import os
from functools import wraps
from moto import mock_dynamodb
from uuid import uuid4
def dynamodb_aws_verified(func):
    """
    Function that is verified to work against AWS.
    Can be run against AWS at any time by setting:
      MOTO_TEST_ALLOW_AWS_REQUEST=true

    If this environment variable is not set, the function runs in a
    `mock_dynamodb` context.

    This decorator will:
      - Create a table
      - Run the test and pass the table_name as an argument
      - Delete the table
    """

    @wraps(func)
    def pagination_wrapper():
        client = boto3.client("dynamodb", region_name="us-east-1")
        # Short random name so parallel/repeated runs don't collide on AWS
        table_name = str(uuid4())[0:6]
        allow_aws_request = (
            os.environ.get("MOTO_TEST_ALLOW_AWS_REQUEST", "false").lower() == "true"
        )

        if allow_aws_request:
            print(f"Test {func} will create DynamoDB Table {table_name}")
            resp = create_table_and_test(table_name, client)
        else:
            with mock_dynamodb():
                resp = create_table_and_test(table_name, client)
        return resp

    def create_table_and_test(table_name, client):
        # Single HASH key `pk` of type String — matches the items the
        # decorated tests put into the table.
        client.create_table(
            TableName=table_name,
            KeySchema=[{"AttributeName": "pk", "KeyType": "HASH"}],
            AttributeDefinitions=[{"AttributeName": "pk", "AttributeType": "S"}],
            ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5},
            Tags=[{"Key": "environment", "Value": "moto_tests"}],
        )
        # create_table is async on real AWS — wait until the table is ACTIVE
        waiter = client.get_waiter("table_exists")
        waiter.wait(TableName=table_name)
        try:
            resp = func(table_name)
        finally:
            ### CLEANUP ###
            client.delete_table(TableName=table_name)
        return resp

    return pagination_wrapper

View File

@ -1,68 +1,79 @@
import boto3 import boto3
import pytest
from moto import mock_dynamodb from moto import mock_dynamodb
from unittest import TestCase from unittest import TestCase
from . import dynamodb_aws_verified
class TestSelectStatements:
mock = mock_dynamodb()
@classmethod item1 = {
def setup_class(cls): "pk": {"S": "msg1"},
cls.mock.start() "body": {"S": "some text"},
cls.client = boto3.client("dynamodb", "us-east-1") "nested_attrs": {"M": {"some": {"S": "key"}}},
cls.client.create_table( "list_attrs": {"L": [{"BOOL": True}, {"BOOL": False}]},
TableName="messages", "bool_attr": {"BOOL": True},
KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], }
AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], item2 = {"pk": {"S": "msg2"}, "body": {"S": "n/a"}, "unique_key": {"S": "key"}}
ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5},
)
cls.item1 = {"id": {"S": "msg1"}, "body": {"S": "some text"}}
cls.item2 = {"id": {"S": "msg2"}, "body": {"S": "n/a"}, "unique": {"S": "key"}}
cls.client.put_item(TableName="messages", Item=cls.item1)
cls.client.put_item(TableName="messages", Item=cls.item2)
@classmethod
def teardown_class(cls):
try:
cls.mock.stop()
except RuntimeError:
pass
def test_execute_statement_select_star(self): def create_items(table_name):
items = TestSelectStatements.client.execute_statement( client = boto3.client("dynamodb", "us-east-1")
Statement="select * from messages" client.put_item(TableName=table_name, Item=item1)
)["Items"] client.put_item(TableName=table_name, Item=item2)
assert TestSelectStatements.item1 in items
assert TestSelectStatements.item2 in items
def test_execute_statement_select_unique(self):
items = TestSelectStatements.client.execute_statement( @pytest.mark.aws_verified
Statement="select unique from messages" @dynamodb_aws_verified
)["Items"] def test_execute_statement_select_star(table_name=None):
client = boto3.client("dynamodb", "us-east-1")
create_items(table_name)
items = client.execute_statement(Statement=f"select * from {table_name}")["Items"]
assert item1 in items
assert item2 in items
@pytest.mark.aws_verified
@dynamodb_aws_verified
def test_execute_statement_select_unique(table_name=None):
client = boto3.client("dynamodb", "us-east-1")
create_items(table_name)
items = client.execute_statement(Statement=f"select unique_key from {table_name}")[
"Items"
]
assert {} in items assert {} in items
assert {"unique": {"S": "key"}} in items assert {"unique_key": {"S": "key"}} in items
def test_execute_statement_with_parameter(self):
stmt = "select * from messages where id = ?" @pytest.mark.aws_verified
items = TestSelectStatements.client.execute_statement( @dynamodb_aws_verified
Statement=stmt, Parameters=[{"S": "msg1"}] def test_execute_statement_with_parameter(table_name=None):
)["Items"] client = boto3.client("dynamodb", "us-east-1")
create_items(table_name)
stmt = f"select * from {table_name} where pk = ?"
items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[
"Items"
]
assert len(items) == 1 assert len(items) == 1
assert TestSelectStatements.item1 in items assert item1 in items
stmt = "select id from messages where id = ?" stmt = f"select pk from {table_name} where pk = ?"
items = TestSelectStatements.client.execute_statement( items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[
Statement=stmt, Parameters=[{"S": "msg1"}] "Items"
)["Items"] ]
assert len(items) == 1 assert len(items) == 1
assert {"id": {"S": "msg1"}} in items assert {"pk": {"S": "msg1"}} in items
def test_execute_statement_with_no_results(self):
stmt = "select * from messages where id = ?" @pytest.mark.aws_verified
items = TestSelectStatements.client.execute_statement( @dynamodb_aws_verified
Statement=stmt, Parameters=[{"S": "msg3"}] def test_execute_statement_with_no_results(table_name=None):
)["Items"] client = boto3.client("dynamodb", "us-east-1")
create_items(table_name)
stmt = f"select * from {table_name} where pk = ?"
items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg3"}])[
"Items"
]
assert items == [] assert items == []

View File

@ -57,6 +57,7 @@ class launch_template_context:
self.ec2.delete_launch_template(LaunchTemplateId=self.lt_id) self.ec2.delete_launch_template(LaunchTemplateId=self.lt_id)
@pytest.mark.aws_verified
@ec2_aws_verified @ec2_aws_verified
def test_launch_template_is_created_properly(): def test_launch_template_is_created_properly():
with launch_template_context() as ctxt: with launch_template_context() as ctxt:
@ -379,6 +380,7 @@ def test_create_fleet_using_launch_template_config__overrides():
assert instance["SubnetId"] == subnet_id assert instance["SubnetId"] == subnet_id
@pytest.mark.aws_verified
@ec2_aws_verified @ec2_aws_verified
def test_delete_fleet(): def test_delete_fleet():
with launch_template_context() as ctxt: with launch_template_context() as ctxt:

View File

@ -1,10 +1,12 @@
import boto3 import boto3
import pytest
from uuid import uuid4 from uuid import uuid4
from . import lakeformation_aws_verified from . import lakeformation_aws_verified
@pytest.mark.aws_verified
@lakeformation_aws_verified @lakeformation_aws_verified
def test_add_unknown_lf_tags( def test_add_unknown_lf_tags(
bucket_name=None, # pylint: disable=unused-argument bucket_name=None, # pylint: disable=unused-argument
@ -32,6 +34,7 @@ def test_add_unknown_lf_tags(
} }
@pytest.mark.aws_verified
@lakeformation_aws_verified @lakeformation_aws_verified
def test_tag_lakeformation_database( def test_tag_lakeformation_database(
bucket_name=None, # pylint: disable=unused-argument bucket_name=None, # pylint: disable=unused-argument
@ -115,6 +118,7 @@ def test_tag_lakeformation_database(
) )
@pytest.mark.aws_verified
@lakeformation_aws_verified @lakeformation_aws_verified
def test_tag_lakeformation_table( def test_tag_lakeformation_table(
bucket_name=None, # pylint: disable=unused-argument bucket_name=None, # pylint: disable=unused-argument
@ -198,6 +202,7 @@ def test_tag_lakeformation_table(
) )
@pytest.mark.aws_verified
@lakeformation_aws_verified @lakeformation_aws_verified
def test_tag_lakeformation_columns( def test_tag_lakeformation_columns(
bucket_name=None, # pylint: disable=unused-argument bucket_name=None, # pylint: disable=unused-argument
@ -361,6 +366,7 @@ def test_tag_lakeformation_columns(
) )
@pytest.mark.aws_verified
@lakeformation_aws_verified @lakeformation_aws_verified
def test_lf_tags( def test_lf_tags(
bucket_name=None, db_name=None, table_name=None, column_name=None bucket_name=None, db_name=None, table_name=None, column_name=None

View File

@ -1,20 +1,22 @@
import json
from unittest import TestCase
from uuid import uuid4
import boto3 import boto3
import json
import pytest import pytest
from moto import mock_s3 from . import s3_aws_verified
SIMPLE_JSON = {"a1": "b1", "a2": "b2", "a3": None} SIMPLE_JSON = {"a1": "b1", "a2": "b2", "a3": None}
SIMPLE_JSON2 = {"a1": "b2", "a3": "b3"} SIMPLE_JSON2 = {"a1": "b2", "a3": "b3"}
NESTED_JSON = {"a1": {"b1": "b2"}, "a2": [True, False], "a3": True, "a4": [1, 5]}
EXTENSIVE_JSON = [ EXTENSIVE_JSON = [
{ {
"staff": [ "staff": [
{"name": "Janelyn M", "city": "Chicago", "kids": 2}, {
{"name": "Stacy P", "city": "Seattle", "kids": 1}, "name": "Janelyn M",
"city": "Chicago",
"kids": [{"Name": "Josh"}, {"Name": "Jay"}],
},
{"name": "Stacy P", "city": "Seattle", "kids": {"Name": "Josh"}},
], ],
"country": "USA", "country": "USA",
} }
@ -26,37 +28,32 @@ y,u,i
q,w,y""" q,w,y"""
@mock_s3 def create_test_files(bucket_name):
class TestS3Select(TestCase): client = boto3.client("s3", "us-east-1")
def setUp(self) -> None: client.put_object(
self.client = boto3.client("s3", "us-east-1") Bucket=bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON)
self.bucket_name = str(uuid4())
self.client.create_bucket(Bucket=self.bucket_name)
self.client.put_object(
Bucket=self.bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON)
) )
self.client.put_object( client.put_object(Bucket=bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST))
Bucket=self.bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST) client.put_object(Bucket=bucket_name, Key="simple_csv", Body=SIMPLE_CSV)
) client.put_object(
self.client.put_object( Bucket=bucket_name,
Bucket=self.bucket_name, Key="simple_csv", Body=SIMPLE_CSV
)
self.client.put_object(
Bucket=self.bucket_name,
Key="extensive.json", Key="extensive.json",
Body=json.dumps(EXTENSIVE_JSON), Body=json.dumps(EXTENSIVE_JSON),
) )
client.put_object(
Bucket=bucket_name,
Key="nested.json",
Body=json.dumps(NESTED_JSON),
)
def tearDown(self) -> None:
self.client.delete_object(Bucket=self.bucket_name, Key="list.json")
self.client.delete_object(Bucket=self.bucket_name, Key="simple.json")
self.client.delete_object(Bucket=self.bucket_name, Key="simple_csv")
self.client.delete_object(Bucket=self.bucket_name, Key="extensive.json")
self.client.delete_bucket(Bucket=self.bucket_name)
def test_query_all(self): @pytest.mark.aws_verified
content = self.client.select_object_content( @s3_aws_verified
Bucket=self.bucket_name, def test_query_all(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="simple.json", Key="simple.json",
Expression="SELECT * FROM S3Object", Expression="SELECT * FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
@ -70,20 +67,20 @@ class TestS3Select(TestCase):
json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8")) json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8"))
# Verify result contains metadata # Verify result contains metadata
assert { stats = [res for res in result if "Stats" in res][0]["Stats"]
"Stats": { assert "BytesScanned" in stats["Details"]
"Details": { assert "BytesProcessed" in stats["Details"]
"BytesScanned": 24, assert "BytesReturned" in stats["Details"]
"BytesProcessed": 24,
"BytesReturned": 22,
}
}
} in result
assert {"End": {}} in result assert {"End": {}} in result
def test_count_function(self):
content = self.client.select_object_content( @pytest.mark.aws_verified
Bucket=self.bucket_name, @s3_aws_verified
def test_count_function(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="simple.json", Key="simple.json",
Expression="SELECT count(*) FROM S3Object", Expression="SELECT count(*) FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
@ -93,10 +90,15 @@ class TestS3Select(TestCase):
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b'{"_1":1},'}} in result assert {"Records": {"Payload": b'{"_1":1},'}} in result
@pytest.mark.aws_verified
@s3_aws_verified
@pytest.mark.xfail(message="Not yet implemented in our parser") @pytest.mark.xfail(message="Not yet implemented in our parser")
def test_count_as(self): def test_count_as(bucket_name=None):
content = self.client.select_object_content( client = boto3.client("s3", "us-east-1")
Bucket=self.bucket_name, create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="simple.json", Key="simple.json",
Expression="SELECT count(*) as cnt FROM S3Object", Expression="SELECT count(*) as cnt FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
@ -106,10 +108,15 @@ class TestS3Select(TestCase):
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b'{"cnt":1},'}} in result assert {"Records": {"Payload": b'{"cnt":1},'}} in result
@pytest.mark.aws_verified
@s3_aws_verified
@pytest.mark.xfail(message="Not yet implemented in our parser") @pytest.mark.xfail(message="Not yet implemented in our parser")
def test_count_list_as(self): def test_count_list_as(bucket_name=None):
content = self.client.select_object_content( client = boto3.client("s3", "us-east-1")
Bucket=self.bucket_name, create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="list.json", Key="list.json",
Expression="SELECT count(*) as cnt FROM S3Object", Expression="SELECT count(*) as cnt FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
@ -119,38 +126,49 @@ class TestS3Select(TestCase):
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b'{"cnt":1},'}} in result assert {"Records": {"Payload": b'{"cnt":1},'}} in result
def test_count_csv(self):
content = self.client.select_object_content( @pytest.mark.aws_verified
Bucket=self.bucket_name, @s3_aws_verified
def test_count_csv(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="simple_csv", Key="simple_csv",
Expression="SELECT count(*) FROM S3Object", Expression="SELECT count(*) FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
InputSerialization={ InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}},
"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}
},
OutputSerialization={"JSON": {"RecordDelimiter": ","}}, OutputSerialization={"JSON": {"RecordDelimiter": ","}},
) )
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b'{"_1":3},'}} in result assert {"Records": {"Payload": b'{"_1":3},'}} in result
def test_default_record_delimiter(self):
content = self.client.select_object_content( @pytest.mark.aws_verified
Bucket=self.bucket_name, @s3_aws_verified
def test_default_record_delimiter(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="simple_csv", Key="simple_csv",
Expression="SELECT count(*) FROM S3Object", Expression="SELECT count(*) FROM S3Object",
ExpressionType="SQL", ExpressionType="SQL",
InputSerialization={ InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}},
"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}
},
# RecordDelimiter is not specified - should default to new line (\n) # RecordDelimiter is not specified - should default to new line (\n)
OutputSerialization={"JSON": {}}, OutputSerialization={"JSON": {}},
) )
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b'{"_1":3}\n'}} in result assert {"Records": {"Payload": b'{"_1":3}\n'}} in result
def test_extensive_json__select_list(self):
content = self.client.select_object_content( @pytest.mark.aws_verified
Bucket=self.bucket_name, @s3_aws_verified
def test_extensive_json__select_list(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="extensive.json", Key="extensive.json",
Expression="select * from s3object[*].staff[*] s", Expression="select * from s3object[*].staff[*] s",
ExpressionType="SQL", ExpressionType="SQL",
@ -160,9 +178,14 @@ class TestS3Select(TestCase):
result = list(content["Payload"]) result = list(content["Payload"])
assert {"Records": {"Payload": b"{},"}} in result assert {"Records": {"Payload": b"{},"}} in result
def test_extensive_json__select_all(self):
content = self.client.select_object_content( @pytest.mark.aws_verified
Bucket=self.bucket_name, @s3_aws_verified
def test_extensive_json__select_all(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="extensive.json", Key="extensive.json",
Expression="select * from s3object s", Expression="select * from s3object s",
ExpressionType="SQL", ExpressionType="SQL",
@ -170,8 +193,36 @@ class TestS3Select(TestCase):
OutputSerialization={"JSON": {"RecordDelimiter": ","}}, OutputSerialization={"JSON": {"RecordDelimiter": ","}},
) )
result = list(content["Payload"]) result = list(content["Payload"])
assert { records = [res for res in result if "Records" in res][0]["Records"][
"Records": { "Payload"
"Payload": b'{"_1":[{"staff":[{"name":"Janelyn M","city":"Chicago","kids":2},{"name":"Stacy P","city":"Seattle","kids":1}],"country":"USA"}]},' ].decode("utf-8")
}
} in result # For some reason, AWS returns records with a comma at the end
assert records[-1] == ","
# Because the original doc is a list, it is returned like this
assert json.loads(records[:-1]) == {"_1": EXTENSIVE_JSON}
@pytest.mark.aws_verified
@s3_aws_verified
def test_nested_json__select_all(bucket_name=None):
client = boto3.client("s3", "us-east-1")
create_test_files(bucket_name)
content = client.select_object_content(
Bucket=bucket_name,
Key="nested.json",
Expression="select * from s3object s",
ExpressionType="SQL",
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
result = list(content["Payload"])
records = [res for res in result if "Records" in res][0]["Records"][
"Payload"
].decode("utf-8")
# For some reason, AWS returns records with a comma at the end
assert records[-1] == ","
assert json.loads(records[:-1]) == NESTED_JSON

View File

@ -1297,6 +1297,7 @@ def test_render_template():
) )
@pytest.mark.aws_verified
@ses_aws_verified @ses_aws_verified
def test_render_template__advanced(): def test_render_template__advanced():
conn = boto3.client("ses", region_name="us-east-1") conn = boto3.client("ses", region_name="us-east-1")