S3/DynamoDB: Improve PartiQL tests (#6897)
This commit is contained in:
parent 5cd288b42c
commit 8ea041029a

.github/workflows/tests_real_aws.yml (2 changed lines, vendored)
@@ -42,4 +42,4 @@ jobs:
       env:
         MOTO_TEST_ALLOW_AWS_REQUEST: ${{ true }}
       run: |
-        pytest -sv tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified
+        pytest -sv tests/test_dynamodb/ tests/test_ec2/ tests/test_lakeformation/ tests/test_ses/ tests/test_s3 -m aws_verified

setup.cfg (18 changed lines)
@@ -52,7 +52,7 @@ all =
     openapi-spec-validator>=0.2.8
     pyparsing>=3.0.7
     jsondiff>=1.1.2
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
     aws-xray-sdk!=0.96,>=0.93
     setuptools
     multipart
@@ -67,7 +67,7 @@ proxy =
     openapi-spec-validator>=0.2.8
     pyparsing>=3.0.7
     jsondiff>=1.1.2
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
     aws-xray-sdk!=0.96,>=0.93
     setuptools
     multipart
@@ -82,7 +82,7 @@ server =
     openapi-spec-validator>=0.2.8
     pyparsing>=3.0.7
     jsondiff>=1.1.2
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
     aws-xray-sdk!=0.96,>=0.93
     setuptools
     flask!=2.2.0,!=2.2.1
@@ -116,7 +116,7 @@ cloudformation =
     openapi-spec-validator>=0.2.8
     pyparsing>=3.0.7
     jsondiff>=1.1.2
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
     aws-xray-sdk!=0.96,>=0.93
     setuptools
 cloudfront =
@@ -139,10 +139,10 @@ dms =
 ds = sshpubkeys>=3.1.0
 dynamodb =
     docker>=3.0.0
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
 dynamodbstreams =
     docker>=3.0.0
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
 ebs = sshpubkeys>=3.1.0
 ec2 = sshpubkeys>=3.1.0
 ec2instanceconnect =
@@ -205,15 +205,15 @@ resourcegroupstaggingapi =
     openapi-spec-validator>=0.2.8
     pyparsing>=3.0.7
     jsondiff>=1.1.2
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
 route53 =
 route53resolver = sshpubkeys>=3.1.0
 s3 =
     PyYAML>=5.1
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
 s3crc32c =
     PyYAML>=5.1
-    py-partiql-parser==0.3.7
+    py-partiql-parser==0.3.8
     crc32c
 s3control =
 sagemaker =
@@ -1 +1,57 @@
-# This file is intentionally left blank.
+import boto3
+import os
+from functools import wraps
+from moto import mock_dynamodb
+from uuid import uuid4
+
+
+def dynamodb_aws_verified(func):
+    """
+    Function that is verified to work against AWS.
+    Can be run against AWS at any time by setting:
+      MOTO_TEST_ALLOW_AWS_REQUEST=true
+
+    If this environment variable is not set, the function runs in a `mock_dynamodb` context.
+
+    This decorator will:
+      - Create a table
+      - Run the test and pass the table_name as an argument
+      - Delete the table
+    """
+
+    @wraps(func)
+    def pagination_wrapper():
+        client = boto3.client("dynamodb", region_name="us-east-1")
+        table_name = str(uuid4())[0:6]
+
+        allow_aws_request = (
+            os.environ.get("MOTO_TEST_ALLOW_AWS_REQUEST", "false").lower() == "true"
+        )
+
+        if allow_aws_request:
+            print(f"Test {func} will create DynamoDB Table {table_name}")
+            resp = create_table_and_test(table_name, client)
+        else:
+            with mock_dynamodb():
+                resp = create_table_and_test(table_name, client)
+        return resp
+
+    def create_table_and_test(table_name, client):
+        client.create_table(
+            TableName=table_name,
+            KeySchema=[{"AttributeName": "pk", "KeyType": "HASH"}],
+            AttributeDefinitions=[{"AttributeName": "pk", "AttributeType": "S"}],
+            ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5},
+            Tags=[{"Key": "environment", "Value": "moto_tests"}],
+        )
+        waiter = client.get_waiter("table_exists")
+        waiter.wait(TableName=table_name)
+        try:
+            resp = func(table_name)
+        finally:
+            ### CLEANUP ###
+            client.delete_table(TableName=table_name)
+
+        return resp
+
+    return pagination_wrapper
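As an aside, here is a minimal, hypothetical usage sketch (not part of this commit) of how a test consumes the dynamodb_aws_verified decorator introduced above: the decorator generates and fills in the table_name argument, and runs the body either against AWS or inside mock_dynamodb depending on MOTO_TEST_ALLOW_AWS_REQUEST.

# Hypothetical usage sketch; the test name and item are illustrative only.
import boto3
import pytest

from . import dynamodb_aws_verified


@pytest.mark.aws_verified
@dynamodb_aws_verified
def test_put_and_get_item(table_name=None):
    # The decorator has already created the table and passes its name in.
    client = boto3.client("dynamodb", region_name="us-east-1")
    client.put_item(TableName=table_name, Item={"pk": {"S": "key1"}})
    item = client.get_item(TableName=table_name, Key={"pk": {"S": "key1"}})["Item"]
    assert item == {"pk": {"S": "key1"}}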
@@ -1,69 +1,80 @@
 import boto3
+import pytest
 
 from moto import mock_dynamodb
 from unittest import TestCase
 
+from . import dynamodb_aws_verified
 
-class TestSelectStatements:
-    mock = mock_dynamodb()
-
-    @classmethod
-    def setup_class(cls):
-        cls.mock.start()
-        cls.client = boto3.client("dynamodb", "us-east-1")
-        cls.client.create_table(
-            TableName="messages",
-            KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
-            AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
-            ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 5},
-        )
-        cls.item1 = {"id": {"S": "msg1"}, "body": {"S": "some text"}}
-        cls.item2 = {"id": {"S": "msg2"}, "body": {"S": "n/a"}, "unique": {"S": "key"}}
-        cls.client.put_item(TableName="messages", Item=cls.item1)
-        cls.client.put_item(TableName="messages", Item=cls.item2)
-
-    @classmethod
-    def teardown_class(cls):
-        try:
-            cls.mock.stop()
-        except RuntimeError:
-            pass
-
-    def test_execute_statement_select_star(self):
-        items = TestSelectStatements.client.execute_statement(
-            Statement="select * from messages"
-        )["Items"]
-        assert TestSelectStatements.item1 in items
-        assert TestSelectStatements.item2 in items
-
-    def test_execute_statement_select_unique(self):
-        items = TestSelectStatements.client.execute_statement(
-            Statement="select unique from messages"
-        )["Items"]
-        assert {} in items
-        assert {"unique": {"S": "key"}} in items
-
-    def test_execute_statement_with_parameter(self):
-        stmt = "select * from messages where id = ?"
-        items = TestSelectStatements.client.execute_statement(
-            Statement=stmt, Parameters=[{"S": "msg1"}]
-        )["Items"]
-        assert len(items) == 1
-        assert TestSelectStatements.item1 in items
-
-        stmt = "select id from messages where id = ?"
-        items = TestSelectStatements.client.execute_statement(
-            Statement=stmt, Parameters=[{"S": "msg1"}]
-        )["Items"]
-        assert len(items) == 1
-        assert {"id": {"S": "msg1"}} in items
-
-    def test_execute_statement_with_no_results(self):
-        stmt = "select * from messages where id = ?"
-        items = TestSelectStatements.client.execute_statement(
-            Statement=stmt, Parameters=[{"S": "msg3"}]
-        )["Items"]
-        assert items == []
+
+item1 = {
+    "pk": {"S": "msg1"},
+    "body": {"S": "some text"},
+    "nested_attrs": {"M": {"some": {"S": "key"}}},
+    "list_attrs": {"L": [{"BOOL": True}, {"BOOL": False}]},
+    "bool_attr": {"BOOL": True},
+}
+item2 = {"pk": {"S": "msg2"}, "body": {"S": "n/a"}, "unique_key": {"S": "key"}}
+
+
+def create_items(table_name):
+    client = boto3.client("dynamodb", "us-east-1")
+    client.put_item(TableName=table_name, Item=item1)
+    client.put_item(TableName=table_name, Item=item2)
+
+
+@pytest.mark.aws_verified
+@dynamodb_aws_verified
+def test_execute_statement_select_star(table_name=None):
+    client = boto3.client("dynamodb", "us-east-1")
+    create_items(table_name)
+    items = client.execute_statement(Statement=f"select * from {table_name}")["Items"]
+    assert item1 in items
+    assert item2 in items
+
+
+@pytest.mark.aws_verified
+@dynamodb_aws_verified
+def test_execute_statement_select_unique(table_name=None):
+    client = boto3.client("dynamodb", "us-east-1")
+    create_items(table_name)
+    items = client.execute_statement(Statement=f"select unique_key from {table_name}")[
+        "Items"
+    ]
+    assert {} in items
+    assert {"unique_key": {"S": "key"}} in items
+
+
+@pytest.mark.aws_verified
+@dynamodb_aws_verified
+def test_execute_statement_with_parameter(table_name=None):
+    client = boto3.client("dynamodb", "us-east-1")
+    create_items(table_name)
+    stmt = f"select * from {table_name} where pk = ?"
+    items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[
+        "Items"
+    ]
+    assert len(items) == 1
+    assert item1 in items
+
+    stmt = f"select pk from {table_name} where pk = ?"
+    items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg1"}])[
+        "Items"
+    ]
+    assert len(items) == 1
+    assert {"pk": {"S": "msg1"}} in items
+
+
+@pytest.mark.aws_verified
+@dynamodb_aws_verified
+def test_execute_statement_with_no_results(table_name=None):
+    client = boto3.client("dynamodb", "us-east-1")
+    create_items(table_name)
+    stmt = f"select * from {table_name} where pk = ?"
+    items = client.execute_statement(Statement=stmt, Parameters=[{"S": "msg3"}])[
+        "Items"
+    ]
+    assert items == []
 
 
 @mock_dynamodb
@@ -57,6 +57,7 @@ class launch_template_context:
         self.ec2.delete_launch_template(LaunchTemplateId=self.lt_id)
 
 
+@pytest.mark.aws_verified
 @ec2_aws_verified
 def test_launch_template_is_created_properly():
     with launch_template_context() as ctxt:
@@ -379,6 +380,7 @@ def test_create_fleet_using_launch_template_config__overrides():
     assert instance["SubnetId"] == subnet_id
 
 
+@pytest.mark.aws_verified
 @ec2_aws_verified
 def test_delete_fleet():
     with launch_template_context() as ctxt:
@@ -1,10 +1,12 @@
 import boto3
+import pytest
 
 from uuid import uuid4
 
 from . import lakeformation_aws_verified
 
 
+@pytest.mark.aws_verified
 @lakeformation_aws_verified
 def test_add_unknown_lf_tags(
     bucket_name=None,  # pylint: disable=unused-argument
@@ -32,6 +34,7 @@ def test_add_unknown_lf_tags(
     }
 
 
+@pytest.mark.aws_verified
 @lakeformation_aws_verified
 def test_tag_lakeformation_database(
     bucket_name=None,  # pylint: disable=unused-argument
@@ -115,6 +118,7 @@ def test_tag_lakeformation_database(
     )
 
 
+@pytest.mark.aws_verified
 @lakeformation_aws_verified
 def test_tag_lakeformation_table(
     bucket_name=None,  # pylint: disable=unused-argument
@@ -198,6 +202,7 @@ def test_tag_lakeformation_table(
     )
 
 
+@pytest.mark.aws_verified
 @lakeformation_aws_verified
 def test_tag_lakeformation_columns(
     bucket_name=None,  # pylint: disable=unused-argument
@@ -361,6 +366,7 @@ def test_tag_lakeformation_columns(
     )
 
 
+@pytest.mark.aws_verified
 @lakeformation_aws_verified
 def test_lf_tags(
     bucket_name=None, db_name=None, table_name=None, column_name=None
@@ -1,20 +1,22 @@
-import json
-from unittest import TestCase
-from uuid import uuid4
-
 import boto3
+import json
 import pytest
 
-from moto import mock_s3
+from . import s3_aws_verified
 
 
 SIMPLE_JSON = {"a1": "b1", "a2": "b2", "a3": None}
 SIMPLE_JSON2 = {"a1": "b2", "a3": "b3"}
+NESTED_JSON = {"a1": {"b1": "b2"}, "a2": [True, False], "a3": True, "a4": [1, 5]}
 EXTENSIVE_JSON = [
     {
         "staff": [
-            {"name": "Janelyn M", "city": "Chicago", "kids": 2},
-            {"name": "Stacy P", "city": "Seattle", "kids": 1},
+            {
+                "name": "Janelyn M",
+                "city": "Chicago",
+                "kids": [{"Name": "Josh"}, {"Name": "Jay"}],
+            },
+            {"name": "Stacy P", "city": "Seattle", "kids": {"Name": "Josh"}},
         ],
         "country": "USA",
     }
@@ -26,152 +28,201 @@ y,u,i
 q,w,y"""
 
 
-@mock_s3
-class TestS3Select(TestCase):
-    def setUp(self) -> None:
-        self.client = boto3.client("s3", "us-east-1")
-        self.bucket_name = str(uuid4())
-        self.client.create_bucket(Bucket=self.bucket_name)
-        self.client.put_object(
-            Bucket=self.bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON)
-        )
-        self.client.put_object(
-            Bucket=self.bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST)
-        )
-        self.client.put_object(
-            Bucket=self.bucket_name, Key="simple_csv", Body=SIMPLE_CSV
-        )
-        self.client.put_object(
-            Bucket=self.bucket_name,
-            Key="extensive.json",
-            Body=json.dumps(EXTENSIVE_JSON),
-        )
-
-    def tearDown(self) -> None:
-        self.client.delete_object(Bucket=self.bucket_name, Key="list.json")
-        self.client.delete_object(Bucket=self.bucket_name, Key="simple.json")
-        self.client.delete_object(Bucket=self.bucket_name, Key="simple_csv")
-        self.client.delete_object(Bucket=self.bucket_name, Key="extensive.json")
-        self.client.delete_bucket(Bucket=self.bucket_name)
-
-    def test_query_all(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="simple.json",
-            Expression="SELECT * FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}} in result
-
-        # Verify result is valid JSON
-        json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8"))
-
-        # Verify result contains metadata
-        assert {
-            "Stats": {
-                "Details": {
-                    "BytesScanned": 24,
-                    "BytesProcessed": 24,
-                    "BytesReturned": 22,
-                }
-            }
-        } in result
-        assert {"End": {}} in result
-
-    def test_count_function(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="simple.json",
-            Expression="SELECT count(*) FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"_1":1},'}} in result
-
-    @pytest.mark.xfail(message="Not yet implement in our parser")
-    def test_count_as(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="simple.json",
-            Expression="SELECT count(*) as cnt FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"cnt":1},'}} in result
-
-    @pytest.mark.xfail(message="Not yet implement in our parser")
-    def test_count_list_as(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="list.json",
-            Expression="SELECT count(*) as cnt FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"cnt":1},'}} in result
-
-    def test_count_csv(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="simple_csv",
-            Expression="SELECT count(*) FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={
-                "CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}
-            },
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"_1":3},'}} in result
-
-    def test_default_record_delimiter(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="simple_csv",
-            Expression="SELECT count(*) FROM S3Object",
-            ExpressionType="SQL",
-            InputSerialization={
-                "CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}
-            },
-            # RecordDelimiter is not specified - should default to new line (\n)
-            OutputSerialization={"JSON": {}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b'{"_1":3}\n'}} in result
-
-    def test_extensive_json__select_list(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="extensive.json",
-            Expression="select * from s3object[*].staff[*] s",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {"Records": {"Payload": b"{},"}} in result
-
-    def test_extensive_json__select_all(self):
-        content = self.client.select_object_content(
-            Bucket=self.bucket_name,
-            Key="extensive.json",
-            Expression="select * from s3object s",
-            ExpressionType="SQL",
-            InputSerialization={"JSON": {"Type": "DOCUMENT"}},
-            OutputSerialization={"JSON": {"RecordDelimiter": ","}},
-        )
-        result = list(content["Payload"])
-        assert {
-            "Records": {
-                "Payload": b'{"_1":[{"staff":[{"name":"Janelyn M","city":"Chicago","kids":2},{"name":"Stacy P","city":"Seattle","kids":1}],"country":"USA"}]},'
-            }
-        } in result
+def create_test_files(bucket_name):
+    client = boto3.client("s3", "us-east-1")
+    client.put_object(
+        Bucket=bucket_name, Key="simple.json", Body=json.dumps(SIMPLE_JSON)
+    )
+    client.put_object(Bucket=bucket_name, Key="list.json", Body=json.dumps(SIMPLE_LIST))
+    client.put_object(Bucket=bucket_name, Key="simple_csv", Body=SIMPLE_CSV)
+    client.put_object(
+        Bucket=bucket_name,
+        Key="extensive.json",
+        Body=json.dumps(EXTENSIVE_JSON),
+    )
+    client.put_object(
+        Bucket=bucket_name,
+        Key="nested.json",
+        Body=json.dumps(NESTED_JSON),
+    )
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_query_all(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="simple.json",
+        Expression="SELECT * FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}} in result
+
+    # Verify result is valid JSON
+    json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8"))
+
+    # Verify result contains metadata
+    stats = [res for res in result if "Stats" in res][0]["Stats"]
+    assert "BytesScanned" in stats["Details"]
+    assert "BytesProcessed" in stats["Details"]
+    assert "BytesReturned" in stats["Details"]
+    assert {"End": {}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_count_function(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="simple.json",
+        Expression="SELECT count(*) FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"_1":1},'}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+@pytest.mark.xfail(message="Not yet implement in our parser")
+def test_count_as(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="simple.json",
+        Expression="SELECT count(*) as cnt FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"cnt":1},'}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+@pytest.mark.xfail(message="Not yet implement in our parser")
+def test_count_list_as(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="list.json",
+        Expression="SELECT count(*) as cnt FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"cnt":1},'}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_count_csv(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="simple_csv",
+        Expression="SELECT count(*) FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"_1":3},'}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_default_record_delimiter(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="simple_csv",
+        Expression="SELECT count(*) FROM S3Object",
+        ExpressionType="SQL",
+        InputSerialization={"CSV": {"FileHeaderInfo": "USE", "FieldDelimiter": ","}},
+        # RecordDelimiter is not specified - should default to new line (\n)
+        OutputSerialization={"JSON": {}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b'{"_1":3}\n'}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_extensive_json__select_list(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="extensive.json",
+        Expression="select * from s3object[*].staff[*] s",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    assert {"Records": {"Payload": b"{},"}} in result
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_extensive_json__select_all(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="extensive.json",
+        Expression="select * from s3object s",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    records = [res for res in result if "Records" in res][0]["Records"][
+        "Payload"
+    ].decode("utf-8")
+
+    # For some reason, AWS returns records with a comma at the end
+    assert records[-1] == ","
+
+    # Because the original doc is a list, it is returned like this
+    assert json.loads(records[:-1]) == {"_1": EXTENSIVE_JSON}
+
+
+@pytest.mark.aws_verified
+@s3_aws_verified
+def test_nested_json__select_all(bucket_name=None):
+    client = boto3.client("s3", "us-east-1")
+    create_test_files(bucket_name)
+    content = client.select_object_content(
+        Bucket=bucket_name,
+        Key="nested.json",
+        Expression="select * from s3object s",
+        ExpressionType="SQL",
+        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
+        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
+    )
+    result = list(content["Payload"])
+    records = [res for res in result if "Records" in res][0]["Records"][
+        "Payload"
+    ].decode("utf-8")
+
+    # For some reason, AWS returns records with a comma at the end
+    assert records[-1] == ","
+
+    assert json.loads(records[:-1]) == NESTED_JSON
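All of the select_object_content tests above consume the response event stream the same way. The following helper is only an illustrative sketch (our own names, not part of this commit) of the usual pattern: concatenate the Records payloads from the stream, strip the trailing record delimiter, and parse the result.

# Illustrative helper; assumes JSON input/output serialization as in the tests above.
import json

import boto3


def select_json_records(bucket, key, expression):
    client = boto3.client("s3", region_name="us-east-1")
    content = client.select_object_content(
        Bucket=bucket,
        Key=key,
        Expression=expression,
        ExpressionType="SQL",
        InputSerialization={"JSON": {"Type": "DOCUMENT"}},
        OutputSerialization={"JSON": {"RecordDelimiter": ","}},
    )
    payload = b""
    for event in content["Payload"]:
        # The stream interleaves Records, Stats, Progress and End events.
        if "Records" in event:
            payload += event["Records"]["Payload"]
    # Each record ends with the "," delimiter, so wrap them in a JSON array to parse.
    text = payload.decode("utf-8").rstrip(",")
    return json.loads(f"[{text}]")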
@@ -1297,6 +1297,7 @@ def test_render_template():
     )
 
 
+@pytest.mark.aws_verified
 @ses_aws_verified
 def test_render_template__advanced():
     conn = boto3.client("ses", region_name="us-east-1")