Merge pull request #2505 from mikegrima/configFix

Fixed a bug with S3 bucket policies for AWS Config
This commit is contained in:
Mike Grima 2019-10-22 15:53:18 -07:00 committed by GitHub
commit 57dc6522f2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 26 additions and 7 deletions

View File

@@ -946,7 +946,7 @@ class FakeBucket(BaseModel):
             }
         s_config['BucketPolicy'] = {
-            'policyText': self.policy if self.policy else None
+            'policyText': self.policy.decode('utf-8') if self.policy else None
         }
         s_config['IsRequesterPaysEnabled'] = 'false' if self.payer == 'BucketOwner' else 'true'

View File

@@ -3,6 +3,8 @@ from __future__ import unicode_literals
 import datetime
 import os
+import sys
 from six.moves.urllib.request import urlopen
 from six.moves.urllib.error import HTTPError
 from functools import wraps
@@ -1218,11 +1220,6 @@ def test_key_with_trailing_slash_in_ordinary_calling_format():
     [k.name for k in bucket.get_all_keys()].should.contain(key_name)
-"""
-boto3
-"""
 @mock_s3
 def test_boto3_key_etag():
     s3 = boto3.client('s3', region_name='us-east-1')
@@ -3695,6 +3692,24 @@ def test_s3_config_dict():
     s3_config_query.backends['global'].set_bucket_acl('logbucket', log_acls)
     s3_config_query.backends['global'].put_bucket_logging('bucket1', {'TargetBucket': 'logbucket', 'TargetPrefix': ''})
+    policy = json.dumps({
+        'Statement': [
+            {
+                "Effect": "Deny",
+                "Action": "s3:DeleteObject",
+                "Principal": "*",
+                "Resource": "arn:aws:s3:::bucket1/*"
+            }
+        ]
+    })
+    # The policy is a byte array -- need to encode in Python 3 -- for Python 2 just pass the raw string in:
+    if sys.version_info[0] > 2:
+        pass_policy = bytes(policy, 'utf-8')
+    else:
+        pass_policy = policy
+    s3_config_query.backends['global'].set_bucket_policy('bucket1', pass_policy)
     # Get the us-west-2 bucket and verify that it works properly:
     bucket1_result = s3_config_query.get_config_resource('bucket1')
@@ -3714,7 +3729,7 @@ def test_s3_config_dict():
         {'destinationBucketName': 'logbucket', 'logFilePrefix': ''}
     # Verify the policy:
-    assert json.loads(bucket1_result['supplementaryConfiguration']['BucketPolicy']) == {'policyText': None}
+    assert json.loads(bucket1_result['supplementaryConfiguration']['BucketPolicy']) == {'policyText': policy}
     # Filter by correct region:
     assert bucket1_result == s3_config_query.get_config_resource('bucket1', resource_region='us-west-2')
@@ -3727,3 +3742,7 @@ def test_s3_config_dict():
     # With an incorrect resource name:
     assert not s3_config_query.get_config_resource('bucket1', resource_name='eu-bucket-1')
+    # Verify that no bucket policy returns the proper value:
+    assert json.loads(s3_config_query.get_config_resource('logbucket')['supplementaryConfiguration']['BucketPolicy']) == \
+        {'policyText': None}