Use TaggingService for S3 - Cleanup

Bert Blommers 2020-04-01 15:35:25 +01:00
parent f7ad4cbc09
commit 8dbfd43c5c
4 changed files with 40 additions and 92 deletions
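For orientation: the cleanup below removes the FakeTag / FakeTagSet / FakeTagging model classes and represents tags as plain dicts, handing persistence to the backend's TaggingService instance (self.tagger). A minimal sketch of the two shapes, with the dict-to-list conversion that the backend hunks below inline (the to_tag_list helper is illustrative, not part of the diff):

```python
# Old shape: nested wrapper objects, e.g.
#   FakeTagging(FakeTagSet([FakeTag("someTag", "someValue")]))
# New shape: a plain dict, converted at the backend boundary into the
# [{"Key": ..., "Value": ...}] list form that the TaggingService expects.
tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"}


def to_tag_list(tags):
    # Illustrative helper; the diff inlines this comprehension in
    # set_key_tags and put_bucket_tags.
    return [{"Key": key, "Value": value} for key, value in tags.items()]


print(to_tag_list(tags))
# [{'Key': 'someTag', 'Value': 'someValue'},
#  {'Key': 'someOtherTag', 'Value': 'someOtherValue'}]
```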

Changed file 1 of 4

@@ -35,7 +35,6 @@ from .exceptions import (
     MalformedXML,
     InvalidStorageClass,
     InvalidTargetBucketForLogging,
-    DuplicateTagKeys,
     CrossLocationLoggingProhibitted,
     NoSuchPublicAccessBlockConfiguration,
     InvalidPublicAccessBlockConfiguration,
@@ -473,26 +472,10 @@ def get_canned_acl(acl):
     return FakeAcl(grants=grants)
 
 
-class FakeTagging(BaseModel):
-    def __init__(self, tag_set=None):
-        self.tag_set = tag_set or FakeTagSet()
-
-
-class FakeTagSet(BaseModel):
-    def __init__(self, tags=None):
-        self.tags = tags or []
-
-
-class FakeTag(BaseModel):
-    def __init__(self, key, value=None):
-        self.key = key
-        self.value = value
-
-
 class LifecycleFilter(BaseModel):
     def __init__(self, prefix=None, tag=None, and_filter=None):
         self.prefix = prefix
-        self.tag = tag
+        (self.tag_key, self.tag_value) = tag if tag else (None, None)
         self.and_filter = and_filter
 
     def to_config_dict(self):
@@ -501,11 +484,11 @@ class LifecycleFilter(BaseModel):
                 "predicate": {"type": "LifecyclePrefixPredicate", "prefix": self.prefix}
             }
-        elif self.tag:
+        elif self.tag_key:
             return {
                 "predicate": {
                     "type": "LifecycleTagPredicate",
-                    "tag": {"key": self.tag.key, "value": self.tag.value},
+                    "tag": {"key": self.tag_key, "value": self.tag_value},
                 }
             }
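A cut-down sketch of the new LifecycleFilter behaviour shown in the two hunks above: the tag is now passed in as a plain (key, value) tuple and unpacked into tag_key / tag_value (base class and the prefix / and_filter branches of to_config_dict omitted):

```python
# Simplified stand-in for moto's LifecycleFilter; only the tag handling is shown.
class LifecycleFilter:
    def __init__(self, prefix=None, tag=None, and_filter=None):
        self.prefix = prefix
        # `tag` is now a plain (key, value) tuple instead of a FakeTag object.
        (self.tag_key, self.tag_value) = tag if tag else (None, None)
        self.and_filter = and_filter


lf = LifecycleFilter(tag=("project", "moto"))
print(lf.tag_key, lf.tag_value)   # project moto
print(LifecycleFilter().tag_key)  # None
```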
@@ -529,12 +512,9 @@ class LifecycleAndFilter(BaseModel):
         if self.prefix is not None:
             data.append({"type": "LifecyclePrefixPredicate", "prefix": self.prefix})
 
-        for tag in self.tags:
+        for key, value in self.tags.items():
             data.append(
-                {
-                    "type": "LifecycleTagPredicate",
-                    "tag": {"key": tag.key, "value": tag.value},
-                }
+                {"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value},}
             )
 
         return data
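The And-filter's tags are now a dict as well; its to_config_dict loop expands each entry into a LifecycleTagPredicate. A standalone sketch of that expansion (sample tag values are illustrative):

```python
# Mirrors the loop above: one predicate dict per tag key/value pair.
and_tags = {"team": "dev", "env": "test"}

data = []
for key, value in and_tags.items():
    data.append({"type": "LifecycleTagPredicate", "tag": {"key": key, "value": value}})

print(data)
# [{'type': 'LifecycleTagPredicate', 'tag': {'key': 'team', 'value': 'dev'}},
#  {'type': 'LifecycleTagPredicate', 'tag': {'key': 'env', 'value': 'test'}}]
```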
@@ -880,7 +860,7 @@ class FakeBucket(BaseModel):
             and_filter = None
             if rule["Filter"].get("And"):
                 filters += 1
-                and_tags = []
+                and_tags = {}
                 if rule["Filter"]["And"].get("Tag"):
                     if not isinstance(rule["Filter"]["And"]["Tag"], list):
                         rule["Filter"]["And"]["Tag"] = [
@@ -888,7 +868,7 @@ class FakeBucket(BaseModel):
                         ]
 
                     for t in rule["Filter"]["And"]["Tag"]:
-                        and_tags.append(FakeTag(t["Key"], t.get("Value", "")))
+                        and_tags[t["Key"]] = t.get("Value", "")
 
                     try:
                         and_prefix = (
@@ -902,7 +882,7 @@ class FakeBucket(BaseModel):
             filter_tag = None
             if rule["Filter"].get("Tag"):
                 filters += 1
-                filter_tag = FakeTag(
+                filter_tag = (
                     rule["Filter"]["Tag"]["Key"],
                     rule["Filter"]["Tag"].get("Value", ""),
                 )
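Within FakeBucket's lifecycle parsing, the rule's Tag element now yields exactly the (key, value) tuple that LifecycleFilter unpacks. A small sketch with an illustrative rule dict:

```python
# Sample parsed lifecycle rule (illustrative).
rule = {"Filter": {"Tag": {"Key": "archive", "Value": "true"}}}

filter_tag = None
if rule["Filter"].get("Tag"):
    filter_tag = (
        rule["Filter"]["Tag"]["Key"],
        rule["Filter"]["Tag"].get("Value", ""),
    )

print(filter_tag)  # ('archive', 'true')
```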
@@ -989,16 +969,6 @@ class FakeBucket(BaseModel):
     def delete_cors(self):
         self.cors = []
 
-    def set_tags(self, tagging):
-        self.tags = tagging
-
-    def delete_tags(self):
-        self.tags = FakeTagging()
-
-    @property
-    def tagging(self):
-        return self.tags
-
     def set_logging(self, logging_config, bucket_backend):
         if not logging_config:
             self.logging = {}
@@ -1359,13 +1329,12 @@ class S3Backend(BaseBackend):
     def get_key_tags(self, key):
         return self.tagger.list_tags_for_resource(key.arn)
 
-    def set_key_tags(self, key, tagging, key_name=None):
+    def set_key_tags(self, key, tags, key_name=None):
         if key is None:
             raise MissingKey(key_name)
         self.tagger.delete_all_tags_for_resource(key.arn)
         self.tagger.tag_resource(
-            key.arn,
-            [{"Key": tag.key, "Value": tag.value} for tag in tagging.tag_set.tags],
+            key.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
         )
         return key
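The backend now resets and re-writes key tags through its tagger in Key/Value list form. A rough round-trip sketch, assuming self.tagger is moto's TaggingService (the import path below reflects moto's layout at the time and may differ between versions; the ARN and tag values are illustrative):

```python
from moto.utilities.tagging_service import TaggingService

tagger = TaggingService()
arn = "arn:aws:s3:::mybucket/mykey"  # illustrative resource ARN
tags = {"stage": "prod", "owner": "data-eng"}

# Same reset-then-set pattern as set_key_tags above.
tagger.delete_all_tags_for_resource(arn)
tagger.tag_resource(
    arn, [{"Key": key, "Value": value} for key, value in tags.items()],
)
print(tagger.list_tags_for_resource(arn))  # tags come back in Key/Value list form
```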
@@ -1373,15 +1342,11 @@ class S3Backend(BaseBackend):
         bucket = self.get_bucket(bucket_name)
         return self.tagger.list_tags_for_resource(bucket.arn)
 
-    def put_bucket_tagging(self, bucket_name, tagging):
-        tag_keys = [tag.key for tag in tagging.tag_set.tags]
-        if len(tag_keys) != len(set(tag_keys)):
-            raise DuplicateTagKeys()
+    def put_bucket_tags(self, bucket_name, tags):
         bucket = self.get_bucket(bucket_name)
         self.tagger.delete_all_tags_for_resource(bucket.arn)
         self.tagger.tag_resource(
-            bucket.arn,
-            [{"Key": tag.key, "Value": tag.value} for tag in tagging.tag_set.tags],
+            bucket.arn, [{"Key": key, "Value": value} for key, value in tags.items()],
         )
 
     def delete_bucket_tagging(self, bucket_name):
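With storage moved into the tagger, the duplicate-key check that used to live in put_bucket_tagging is now raised while parsing the XML (see the DuplicateTagKeys hunk in responses.py below). From the client's point of view nothing changes; a usage sketch with boto3 against the mocked backend (bucket name and tag values are illustrative; requires boto3 and moto):

```python
import boto3
from moto import mock_s3


@mock_s3
def tag_round_trip():
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="mybucket")
    client.put_bucket_tagging(
        Bucket="mybucket",
        Tagging={"TagSet": [{"Key": "stage", "Value": "prod"}]},
    )
    return client.get_bucket_tagging(Bucket="mybucket")["TagSet"]


print(tag_round_trip())  # [{'Key': 'stage', 'Value': 'prod'}]
```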

Changed file 2 of 4

@@ -24,6 +24,7 @@ from moto.s3bucket_path.utils import (
 
 from .exceptions import (
     BucketAlreadyExists,
+    DuplicateTagKeys,
     S3ClientError,
     MissingBucket,
     MissingKey,
@@ -43,9 +44,6 @@ from .models import (
     FakeGrant,
     FakeAcl,
     FakeKey,
-    FakeTagging,
-    FakeTagSet,
-    FakeTag,
 )
 from .utils import (
     bucket_name_from_url,
@@ -652,7 +650,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             return ""
         elif "tagging" in querystring:
             tagging = self._bucket_tagging_from_xml(body)
-            self.backend.put_bucket_tagging(bucket_name, tagging)
+            self.backend.put_bucket_tags(bucket_name, tagging)
             return ""
         elif "website" in querystring:
             self.backend.set_bucket_website_configuration(bucket_name, body)
@@ -1361,55 +1359,45 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         return None
 
     def _tagging_from_headers(self, headers):
+        tags = {}
         if headers.get("x-amz-tagging"):
             parsed_header = parse_qs(headers["x-amz-tagging"], keep_blank_values=True)
-            tags = []
             for tag in parsed_header.items():
-                tags.append(FakeTag(tag[0], tag[1][0]))
-
-            tag_set = FakeTagSet(tags)
-            tagging = FakeTagging(tag_set)
-            return tagging
-        else:
-            return FakeTagging()
+                tags[tag[0]] = tag[1][0]
+        return tags
 
     def _tagging_from_xml(self, xml):
         parsed_xml = xmltodict.parse(xml, force_list={"Tag": True})
 
-        tags = []
+        tags = {}
         for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
-            tags.append(FakeTag(tag["Key"], tag["Value"]))
+            tags[tag["Key"]] = tag["Value"]
 
-        tag_set = FakeTagSet(tags)
-        tagging = FakeTagging(tag_set)
-        return tagging
+        return tags
 
     def _bucket_tagging_from_xml(self, xml):
        parsed_xml = xmltodict.parse(xml)
 
-        tags = []
+        tags = {}
         # Optional if no tags are being sent:
         if parsed_xml["Tagging"].get("TagSet"):
             # If there is only 1 tag, then it's not a list:
             if not isinstance(parsed_xml["Tagging"]["TagSet"]["Tag"], list):
-                tags.append(
-                    FakeTag(
-                        parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"],
-                        parsed_xml["Tagging"]["TagSet"]["Tag"]["Value"],
-                    )
-                )
+                tags[parsed_xml["Tagging"]["TagSet"]["Tag"]["Key"]] = parsed_xml[
+                    "Tagging"
+                ]["TagSet"]["Tag"]["Value"]
             else:
                 for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]:
-                    tags.append(FakeTag(tag["Key"], tag["Value"]))
+                    if tag["Key"] in tags:
+                        raise DuplicateTagKeys()
+                    tags[tag["Key"]] = tag["Value"]
 
         # Verify that "aws:" is not in the tags. If so, then this is a problem:
-        for tag in tags:
-            if tag.key.startswith("aws:"):
+        for key, _ in tags.items():
+            if key.startswith("aws:"):
                 raise NoSystemTags()
 
-        tag_set = FakeTagSet(tags)
-        tagging = FakeTagging(tag_set)
-        return tagging
+        return tags
 
     def _cors_from_xml(self, xml):
         parsed_xml = xmltodict.parse(xml)
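The three helpers above now return plain dicts. A standalone sketch of the two parsing paths, header and XML (requires xmltodict; the sample header and document values are illustrative):

```python
from urllib.parse import parse_qs

import xmltodict

# 1) Tags passed via the x-amz-tagging request header.
header = "stage=prod&owner=data-eng"
parsed_header = parse_qs(header, keep_blank_values=True)
header_tags = {key: values[0] for key, values in parsed_header.items()}
print(header_tags)  # {'stage': 'prod', 'owner': 'data-eng'}

# 2) Tags passed as a Tagging XML document; force_list keeps a single
#    <Tag> element iterable, matching _tagging_from_xml above.
xml = """<Tagging><TagSet>
  <Tag><Key>stage</Key><Value>prod</Value></Tag>
  <Tag><Key>owner</Key><Value>data-eng</Value></Tag>
</TagSet></Tagging>"""
parsed_xml = xmltodict.parse(xml, force_list={"Tag": True})
xml_tags = {tag["Key"]: tag["Value"] for tag in parsed_xml["Tagging"]["TagSet"]["Tag"]}
print(xml_tags)  # {'stage': 'prod', 'owner': 'data-eng'}
```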
@@ -1730,10 +1718,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
                 {% if rule.filter.prefix != None %}
                 <Prefix>{{ rule.filter.prefix }}</Prefix>
                 {% endif %}
-                {% if rule.filter.tag %}
+                {% if rule.filter.tag_key %}
                 <Tag>
-                    <Key>{{ rule.filter.tag.key }}</Key>
-                    <Value>{{ rule.filter.tag.value }}</Value>
+                    <Key>{{ rule.filter.tag_key }}</Key>
+                    <Value>{{ rule.filter.tag_value }}</Value>
                 </Tag>
                 {% endif %}
                 {% if rule.filter.and_filter %}
@@ -1741,10 +1729,10 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
                 {% if rule.filter.and_filter.prefix != None %}
                 <Prefix>{{ rule.filter.and_filter.prefix }}</Prefix>
                 {% endif %}
-                {% for tag in rule.filter.and_filter.tags %}
+                {% for key, value in rule.filter.and_filter.tags.items() %}
                 <Tag>
-                    <Key>{{ tag.key }}</Key>
-                    <Value>{{ tag.value }}</Value>
+                    <Key>{{ key }}</Key>
+                    <Value>{{ value }}</Value>
                 </Tag>
                 {% endfor %}
                 </And>
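The lifecycle response template iterates the tags dict directly. An isolated render of the And-filter fragment above (requires jinja2; the surrounding lifecycle template is omitted):

```python
from jinja2 import Template

fragment = Template(
    "{% for key, value in tags.items() %}"
    "<Tag><Key>{{ key }}</Key><Value>{{ value }}</Value></Tag>"
    "{% endfor %}"
)
print(fragment.render(tags={"team": "dev", "env": "test"}))
# <Tag><Key>team</Key><Value>dev</Value></Tag><Tag><Key>env</Key><Value>test</Value></Tag>
```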

Changed file 3 of 4

@@ -11,6 +11,8 @@ from moto import mock_s3
 from moto.config import mock_config
 from moto.core import ACCOUNT_ID
 
+import sure  # noqa
+
 
 @mock_config
 def test_put_configuration_recorder():

Changed file 4 of 4

@@ -4295,24 +4295,17 @@ def test_s3_config_dict():
         FakeAcl,
         FakeGrant,
         FakeGrantee,
-        FakeTag,
-        FakeTagging,
-        FakeTagSet,
         OWNER,
     )
 
     # Without any buckets:
     assert not s3_config_query.get_config_resource("some_bucket")
 
-    tags = FakeTagging(
-        FakeTagSet(
-            [FakeTag("someTag", "someValue"), FakeTag("someOtherTag", "someOtherValue")]
-        )
-    )
+    tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"}
 
     # With 1 bucket in us-west-2:
     s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")
-    s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags)
+    s3_config_query.backends["global"].put_bucket_tags("bucket1", tags)
 
     # With a log bucket:
     s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")