diff --git a/.github/workflows/tests_real_aws.yml b/.github/workflows/tests_real_aws.yml index 1c726cfdc..4ff82f327 100644 --- a/.github/workflows/tests_real_aws.yml +++ b/.github/workflows/tests_real_aws.yml @@ -42,4 +42,4 @@ jobs: env: MOTO_TEST_ALLOW_AWS_REQUEST: ${{ true }} run: | - pytest -sv tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified + pytest -sv tests/test_dynamodb/ tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified diff --git a/setup.cfg b/setup.cfg index ed8590084..6a79236dc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -52,7 +52,7 @@ all = openapi-spec-validator>=0.2.8 pyparsing>=3.0.7 jsondiff>=1.1.2 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 aws-xray-sdk!=0.96,>=0.93 setuptools multipart @@ -67,7 +67,7 @@ proxy = openapi-spec-validator>=0.2.8 pyparsing>=3.0.7 jsondiff>=1.1.2 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 aws-xray-sdk!=0.96,>=0.93 setuptools multipart @@ -82,7 +82,7 @@ server = openapi-spec-validator>=0.2.8 pyparsing>=3.0.7 jsondiff>=1.1.2 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 aws-xray-sdk!=0.96,>=0.93 setuptools flask!=2.2.0,!=2.2.1 @@ -116,7 +116,7 @@ cloudformation = openapi-spec-validator>=0.2.8 pyparsing>=3.0.7 jsondiff>=1.1.2 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 aws-xray-sdk!=0.96,>=0.93 setuptools cloudfront = @@ -139,10 +139,10 @@ dms = ds = sshpubkeys>=3.1.0 dynamodb = docker>=3.0.0 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 dynamodbstreams = docker>=3.0.0 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 ebs = sshpubkeys>=3.1.0 ec2 = sshpubkeys>=3.1.0 ec2instanceconnect = @@ -205,15 +205,15 @@ resourcegroupstaggingapi = openapi-spec-validator>=0.2.8 pyparsing>=3.0.7 jsondiff>=1.1.2 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 route53 = route53resolver = sshpubkeys>=3.1.0 s3 = PyYAML>=5.1 - py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 s3crc32c = PyYAML>=5.1 - 
py-partiql-parser==0.3.7 + py-partiql-parser==0.3.8 crc32c s3control = sagemaker = diff --git a/tests/test_dynamodb/__init__.py b/tests/test_dynamodb/__init__.py index 08a1c1568..99086f604 100644 --- a/tests/test_dynamodb/__init__.py +++ b/tests/test_dynamodb/__init__.py @@ -1 +1,57 @@ -# This file is intentionally left blank. +import boto3 +import os +from functools import wraps +from moto import mock_dynamodb +from uuid import uuid4 + + +def dynamodb_aws_verified(func): + """ + Function that is verified to work against AWS. + Can be run against AWS at any time by setting: + MOTO_TEST_ALLOW_AWS_REQUEST=true + + If this environment variable is not set, the function runs in a `mock_dynamodb` context. + + This decorator will: + - Create a table + - Run the test and pass the table_name as an argument + - Delete the table + """ + + @wraps(func) + def pagination_wrapper(): + client = boto3.client("dynamodb", region_name="us-east-1") + table_name = str(uuid4())[0:6] + + allow_aws_request = ( + os.environ.get("MOTO_TEST_ALLOW_AWS_REQUEST", "false").lower() == "true" + ) + + if allow_aws_request: + print(f"Test {func} will create DynamoDB Table {table_name}") + resp = create_table_and_test(table_name, client) + else: + with mock_dynamodb(): + resp = create_table_and_test(table_name, client) + return resp + + def create_table_and_test(table_name, client): + client.create_table( + TableName=table_name, + KeySchema=[{"AttributeName": "pk", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "pk", "AttributeType": "S"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5}, + Tags=[{"Key": "environment", "Value": "moto_tests"}], + ) + waiter = client.get_waiter("table_exists") + waiter.wait(TableName=table_name) + try: + resp = func(table_name) + finally: + ### CLEANUP ### + client.delete_table(TableName=table_name) + + return resp + + return pagination_wrapper diff --git a/tests/test_dynamodb/test_dynamodb_statements.py 
b/tests/test_dynamodb/test_dynamodb_statements.py index 48879ec34..952487321 100644 --- a/tests/test_dynamodb/test_dynamodb_statements.py +++ b/tests/test_dynamodb/test_dynamodb_statements.py @@ -1,69 +1,80 @@ import boto3 +import pytest from moto import mock_dynamodb from unittest import TestCase +from . import dynamodb_aws_verified -class TestSelectStatements: - mock = mock_dynamodb() - @classmethod - def setup_class(cls): - cls.mock.start() - cls.client = boto3.client("dynamodb", "us-east-1") - cls.client.create_table( - TableName="messages", - KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], - AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], - ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5}, - ) - cls.item1 = {"id": {"S": "msg1"}, "body": {"S": "some text"}} - cls.item2 = {"id": {"S": "msg2"}, "body": {"S": "n/a"}, "unique": {"S": "key"}} - cls.client.put_item(TableName="messages", Item=cls.item1) - cls.client.put_item(TableName="messages", Item=cls.item2) +item1 = { + "pk": {"S": "msg1"}, + "body": {"S": "some text"}, + "nested_attrs": {"M": {"some": {"S": "key"}}}, + "list_attrs": {"L": [{"BOOL": True}, {"BOOL": False}]}, + "bool_attr": {"BOOL": True}, +} +item2 = {"pk": {"S": "msg2"}, "body": {"S": "n/a"}, "unique_key": {"S": "key"}} - @classmethod - def teardown_class(cls): - try: - cls.mock.stop() - except RuntimeError: - pass - def test_execute_statement_select_star(self): - items = TestSelectStatements.client.execute_statement( - Statement="select * from messages" - )["Items"] - assert TestSelectStatements.item1 in items - assert TestSelectStatements.item2 in items +def create_items(table_name): + client = boto3.client("dynamodb", "us-east-1") + client.put_item(TableName=table_name, Item=item1) + client.put_item(TableName=table_name, Item=item2) - def test_execute_statement_select_unique(self): - items = TestSelectStatements.client.execute_statement( - Statement="select unique from messages" - 
)["Items"] - assert {} in items - assert {"unique": {"S": "key"}} in items - def test_execute_statement_with_parameter(self): - stmt = "select * from messages where id = ?" - items = TestSelectStatements.client.execute_statement( - Statement=stmt, Parameters=[{"S": "msg1"}] - )["Items"] - assert len(items) == 1 - assert TestSelectStatements.item1 in items +@pytest.mark.aws_verified +@dynamodb_aws_verified +def test_execute_statement_select_star(table_name=None): + client = boto3.client("dynamodb", "us-east-1") + create_items(table_name) + items = client.execute_statement(Statement=f"select * from {table_name}")["Items"] + assert item1 in items + assert item2 in items - stmt = "select id from messages where id = ?" - items = TestSelectStatements.client.execute_statement( - Statement=stmt, Parameters=[{"S": "msg1"}] - )["Items"] - assert len(items) == 1 - assert {"id": {"S": "msg1"}} in items - def test_execute_statement_with_no_results(self): - stmt = "select * from messages where id = ?" - items = TestSelectStatements.client.execute_statement( - Statement=stmt, Parameters=[{"S": "msg3"}] - )["Items"] - assert items == [] +@pytest.mark.aws_verified +@dynamodb_aws_verified +def test_execute_statement_select_unique(table_name=None): + client = boto3.client("dynamodb", "us-east-1") + create_items(table_name) + items = client.execute_statement(Statement=f"select unique_key from {table_name}")[ + "Items" + ] + assert {} in items + assert {"unique_key": {"S": "key"}} in items + + +@pytest.mark.aws_verified +@dynamodb_aws_verified +def test_execute_statement_with_parameter(table_name=None): + client = boto3.client("dynamodb", "us-east-1") + create_items(table_name) + stmt = f"select * from {table_name} where pk = ?" + items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[ + "Items" + ] + assert len(items) == 1 + assert item1 in items + + stmt = f"select pk from {table_name} where pk = ?" 
+ items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[ + "Items" + ] + assert len(items) == 1 + assert {"pk": {"S": "msg1"}} in items + + +@pytest.mark.aws_verified +@dynamodb_aws_verified +def test_execute_statement_with_no_results(table_name=None): + client = boto3.client("dynamodb", "us-east-1") + create_items(table_name) + stmt = f"select * from {table_name} where pk = ?" + items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg3"}])[ + "Items" + ] + assert items == [] @mock_dynamodb diff --git a/tests/test_ec2/test_fleets.py b/tests/test_ec2/test_fleets.py index 2dfda6991..b63595411 100644 --- a/tests/test_ec2/test_fleets.py +++ b/tests/test_ec2/test_fleets.py @@ -57,6 +57,7 @@ class launch_template_context: self.ec2.delete_launch_template(LaunchTemplateId=self.lt_id) +@pytest.mark.aws_verified @ec2_aws_verified def test_launch_template_is_created_properly(): with launch_template_context() as ctxt: @@ -379,6 +380,7 @@ def test_create_fleet_using_launch_template_config__overrides(): assert instance["SubnetId"] == subnet_id +@pytest.mark.aws_verified @ec2_aws_verified def test_delete_fleet(): with launch_template_context() as ctxt: diff --git a/tests/test_lakeformation/test_resource_tags_integration.py b/tests/test_lakeformation/test_resource_tags_integration.py index ecc33cc16..1887e5a1c 100644 --- a/tests/test_lakeformation/test_resource_tags_integration.py +++ b/tests/test_lakeformation/test_resource_tags_integration.py @@ -1,10 +1,12 @@ import boto3 +import pytest from uuid import uuid4 from . 
import lakeformation_aws_verified +@pytest.mark.aws_verified @lakeformation_aws_verified def test_add_unknown_lf_tags( bucket_name=None, # pylint: disable=unused-argument @@ -32,6 +34,7 @@ def test_add_unknown_lf_tags( } +@pytest.mark.aws_verified @lakeformation_aws_verified def test_tag_lakeformation_database( bucket_name=None, # pylint: disable=unused-argument @@ -115,6 +118,7 @@ def test_tag_lakeformation_database( ) +@pytest.mark.aws_verified @lakeformation_aws_verified def test_tag_lakeformation_table( bucket_name=None, # pylint: disable=unused-argument @@ -198,6 +202,7 @@ def test_tag_lakeformation_table( ) +@pytest.mark.aws_verified @lakeformation_aws_verified def test_tag_lakeformation_columns( bucket_name=None, # pylint: disable=unused-argument @@ -361,6 +366,7 @@ def test_tag_lakeformation_columns( ) +@pytest.mark.aws_verified @lakeformation_aws_verified def test_lf_tags( bucket_name=None, db_name=None, table_name=None, column_name=None diff --git a/tests/test_s3/test_s3_select.py b/tests/test_s3/test_s3_select.py index d299365f8..84a80dc34 100644 --- a/tests/test_s3/test_s3_select.py +++ b/tests/test_s3/test_s3_select.py @@ -1,20 +1,22 @@ -import json -from unittest import TestCase -from uuid import uuid4 - import boto3 +import json import pytest -from moto import mock_s3 +from . 
import s3_aws_verified SIMPLE_JSON = {"a1": "b1", "a2": "b2", "a3": None} SIMPLE_JSON2 = {"a1": "b2", "a3": "b3"} +NESTED_JSON = {"a1": {"b1": "b2"}, "a2": [True, False], "a3": True, "a4": [1, 5]} EXTENSIVE_JSON = [ { "staff": [ - {"name": "Janelyn M", "city": "Chicago", "kids": 2}, - {"name": "Stacy P", "city": "Seattle", "kids": 1}, + { + "name": "Janelyn M", + "city": "Chicago", + "kids": [{"Name": "Josh"}, {"Name": "Jay"}], + }, + {"name": "Stacy P", "city": "Seattle", "kids": {"Name": "Josh"}}, ], "country": "USA", } @@ -26,152 +28,201 @@ y,u,i q,w,y""" -@mock_s3 -class TestS3Select(TestCase): - def setUp(self) -> None: - self.client = boto3.client("s3", "us-east-1") - self.bucket_name = str(uuid4()) - self.client.create_bucket(Bucket=self.bucket_name) - self.client.put_object( - Bucket=self.bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON) - ) - self.client.put_object( - Bucket=self.bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST) - ) - self.client.put_object( - Bucket=self.bucket_name, Key="simple_csv", Body=SIMPLE_CSV - ) - self.client.put_object( - Bucket=self.bucket_name, - Key="extensive.json", - Body=json.dumps(EXTENSIVE_JSON), - ) +def create_test_files(bucket_name): + client = boto3.client("s3", "us-east-1") + client.put_object( + Bucket=bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON) + ) + client.put_object(Bucket=bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST)) + client.put_object(Bucket=bucket_name, Key="simple_csv", Body=SIMPLE_CSV) + client.put_object( + Bucket=bucket_name, + Key="extensive.json", + Body=json.dumps(EXTENSIVE_JSON), + ) + client.put_object( + Bucket=bucket_name, + Key="nested.json", + Body=json.dumps(NESTED_JSON), + ) - def tearDown(self) -> None: - self.client.delete_object(Bucket=self.bucket_name, Key="list.json") - self.client.delete_object(Bucket=self.bucket_name, Key="simple.json") - self.client.delete_object(Bucket=self.bucket_name, Key="simple_csv") - 
self.client.delete_object(Bucket=self.bucket_name, Key="extensive.json") - self.client.delete_bucket(Bucket=self.bucket_name) - def test_query_all(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="simple.json", - Expression="SELECT * FROM S3Object", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}} in result +@pytest.mark.aws_verified +@s3_aws_verified +def test_query_all(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="simple.json", + Expression="SELECT * FROM S3Object", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}} in result - # Verify result is valid JSON - json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8")) + # Verify result is valid JSON + json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8")) - # Verify result contains metadata - assert { - "Stats": { - "Details": { - "BytesScanned": 24, - "BytesProcessed": 24, - "BytesReturned": 22, - } - } - } in result - assert {"End": {}} in result + # Verify result contains metadata + stats = [res for res in result if "Stats" in res][0]["Stats"] + assert "BytesScanned" in stats["Details"] + assert "BytesProcessed" in stats["Details"] + assert "BytesReturned" in stats["Details"] + assert {"End": {}} in result - def test_count_function(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="simple.json", - Expression="SELECT count(*) FROM S3Object", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - 
OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b'{"_1":1},'}} in result - @pytest.mark.xfail(message="Not yet implement in our parser") - def test_count_as(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="simple.json", - Expression="SELECT count(*) as cnt FROM S3Object", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b'{"cnt":1},'}} in result +@pytest.mark.aws_verified +@s3_aws_verified +def test_count_function(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="simple.json", + Expression="SELECT count(*) FROM S3Object", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"_1":1},'}} in result - @pytest.mark.xfail(message="Not yet implement in our parser") - def test_count_list_as(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="list.json", - Expression="SELECT count(*) as cnt FROM S3Object", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b'{"cnt":1},'}} in result - def test_count_csv(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="simple_csv", - Expression="SELECT count(*) FROM S3Object", - ExpressionType="SQL", - InputSerialization={ - "CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","} - }, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = 
list(content["Payload"]) - assert {"Records": {"Payload": b'{"_1":3},'}} in result +@pytest.mark.aws_verified +@s3_aws_verified +@pytest.mark.xfail(reason="Not yet implemented in our parser") +def test_count_as(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="simple.json", + Expression="SELECT count(*) as cnt FROM S3Object", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"cnt":1},'}} in result - def test_default_record_delimiter(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="simple_csv", - Expression="SELECT count(*) FROM S3Object", - ExpressionType="SQL", - InputSerialization={ - "CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","} - }, - # RecordDelimiter is not specified - should default to new line (\n) - OutputSerialization={"JSON": {}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b'{"_1":3}\n'}} in result - def test_extensive_json__select_list(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="extensive.json", - Expression="select * from s3object[*].staff[*] s", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert {"Records": {"Payload": b"{},"}} in result +@pytest.mark.aws_verified +@s3_aws_verified +@pytest.mark.xfail(reason="Not yet implemented in our parser") +def test_count_list_as(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="list.json", + Expression="SELECT count(*) as cnt FROM S3Object", + ExpressionType="SQL", + 
InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"cnt":1},'}} in result - def test_extensive_json__select_all(self): - content = self.client.select_object_content( - Bucket=self.bucket_name, - Key="extensive.json", - Expression="select * from s3object s", - ExpressionType="SQL", - InputSerialization={"JSON": {"Type": "DOCUMENT"}}, - OutputSerialization={"JSON": {"RecordDelimiter": ","}}, - ) - result = list(content["Payload"]) - assert { - "Records": { - "Payload": b'{"_1":[{"staff":[{"name":"Janelyn M","city":"Chicago","kids":2},{"name":"Stacy P","city":"Seattle","kids":1}],"country":"USA"}]},' - } - } in result + +@pytest.mark.aws_verified +@s3_aws_verified +def test_count_csv(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="simple_csv", + Expression="SELECT count(*) FROM S3Object", + ExpressionType="SQL", + InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"_1":3},'}} in result + + +@pytest.mark.aws_verified +@s3_aws_verified +def test_default_record_delimiter(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="simple_csv", + Expression="SELECT count(*) FROM S3Object", + ExpressionType="SQL", + InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}}, + # RecordDelimiter is not specified - should default to new line (\n) + OutputSerialization={"JSON": {}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b'{"_1":3}\n'}} in result + + +@pytest.mark.aws_verified +@s3_aws_verified +def 
test_extensive_json__select_list(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="extensive.json", + Expression="select * from s3object[*].staff[*] s", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + assert {"Records": {"Payload": b"{},"}} in result + + +@pytest.mark.aws_verified +@s3_aws_verified +def test_extensive_json__select_all(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="extensive.json", + Expression="select * from s3object s", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + records = [res for res in result if "Records" in res][0]["Records"][ + "Payload" + ].decode("utf-8") + + # For some reason, AWS returns records with a comma at the end + assert records[-1] == "," + + # Because the original doc is a list, it is returned like this + assert json.loads(records[:-1]) == {"_1": EXTENSIVE_JSON} + + +@pytest.mark.aws_verified +@s3_aws_verified +def test_nested_json__select_all(bucket_name=None): + client = boto3.client("s3", "us-east-1") + create_test_files(bucket_name) + content = client.select_object_content( + Bucket=bucket_name, + Key="nested.json", + Expression="select * from s3object s", + ExpressionType="SQL", + InputSerialization={"JSON": {"Type": "DOCUMENT"}}, + OutputSerialization={"JSON": {"RecordDelimiter": ","}}, + ) + result = list(content["Payload"]) + records = [res for res in result if "Records" in res][0]["Records"][ + "Payload" + ].decode("utf-8") + + # For some reason, AWS returns records with a comma at the end + assert records[-1] == "," + + assert 
json.loads(records[:-1]) == NESTED_JSON diff --git a/tests/test_ses/test_ses_boto3.py b/tests/test_ses/test_ses_boto3.py index eda9cb1b0..c06408c17 100644 --- a/tests/test_ses/test_ses_boto3.py +++ b/tests/test_ses/test_ses_boto3.py @@ -1297,6 +1297,7 @@ def test_render_template(): ) +@pytest.mark.aws_verified @ses_aws_verified def test_render_template__advanced(): conn = boto3.client("ses", region_name="us-east-1")