Improve implementation coverage

Bert Blommers 2020-06-06 13:15:50 +01:00
parent 5508da186b
commit 20784a2d67
7 changed files with 378 additions and 67 deletions

File diff suppressed because it is too large


@@ -218,7 +218,7 @@ class LambdaFunction(BaseModel):
             key = None
             try:
                 # FIXME: does not validate bucket region
-                key = s3_backend.get_key(self.code["S3Bucket"], self.code["S3Key"])
+                key = s3_backend.get_object(self.code["S3Bucket"], self.code["S3Key"])
             except MissingBucket:
                 if do_validate_s3():
                     raise InvalidParameterValueException(
@@ -344,7 +344,7 @@ class LambdaFunction(BaseModel):
             key = None
             try:
                 # FIXME: does not validate bucket region
-                key = s3_backend.get_key(
+                key = s3_backend.get_object(
                     updated_spec["S3Bucket"], updated_spec["S3Key"]
                 )
             except MissingBucket:


@@ -541,7 +541,7 @@ class ResourceMap(collections_abc.Mapping):
             if name == "AWS::Include":
                 location = params["Location"]
                 bucket_name, name = bucket_and_name_from_url(location)
-                key = s3_backend.get_key(bucket_name, name)
+                key = s3_backend.get_object(bucket_name, name)
                 self._parsed_resources.update(json.loads(key.value))
 
     def load_parameters(self):


@@ -36,7 +36,7 @@ class CloudFormationResponse(BaseResponse):
             bucket_name = template_url_parts.netloc.split(".")[0]
             key_name = template_url_parts.path.lstrip("/")
 
-        key = s3_backend.get_key(bucket_name, key_name)
+        key = s3_backend.get_object(bucket_name, key_name)
         return key.value.decode("utf-8")
 
     def create_stack(self):


@@ -1315,7 +1315,7 @@ class S3Backend(BaseBackend):
         return self.account_public_access_block
 
-    def set_key(
+    def set_object(
         self, bucket_name, key_name, value, storage=None, etag=None, multipart=None
     ):
         key_name = clean_key_name(key_name)
@@ -1346,11 +1346,11 @@ class S3Backend(BaseBackend):
 
     def append_to_key(self, bucket_name, key_name, value):
         key_name = clean_key_name(key_name)
-        key = self.get_key(bucket_name, key_name)
+        key = self.get_object(bucket_name, key_name)
         key.append_to_value(value)
         return key
 
-    def get_key(self, bucket_name, key_name, version_id=None, part_number=None):
+    def get_object(self, bucket_name, key_name, version_id=None, part_number=None):
         key_name = clean_key_name(key_name)
         bucket = self.get_bucket(bucket_name)
         key = None
@@ -1385,11 +1385,11 @@ class S3Backend(BaseBackend):
         )
         return key
 
-    def get_bucket_tags(self, bucket_name):
+    def get_bucket_tagging(self, bucket_name):
         bucket = self.get_bucket(bucket_name)
         return self.tagger.list_tags_for_resource(bucket.arn)
 
-    def put_bucket_tags(self, bucket_name, tags):
+    def put_bucket_tagging(self, bucket_name, tags):
         bucket = self.get_bucket(bucket_name)
         self.tagger.delete_all_tags_for_resource(bucket.arn)
         self.tagger.tag_resource(
@@ -1481,7 +1481,7 @@ class S3Backend(BaseBackend):
             return
         del bucket.multiparts[multipart_id]
 
-        key = self.set_key(
+        key = self.set_object(
             bucket_name, multipart.key_name, value, etag=etag, multipart=multipart
         )
         key.set_metadata(multipart.metadata)
@@ -1521,7 +1521,7 @@ class S3Backend(BaseBackend):
         dest_bucket = self.get_bucket(dest_bucket_name)
         multipart = dest_bucket.multiparts[multipart_id]
 
-        src_value = self.get_key(
+        src_value = self.get_object(
             src_bucket_name, src_key_name, version_id=src_version_id
         ).value
         if start_byte is not None:
@@ -1565,7 +1565,7 @@ class S3Backend(BaseBackend):
         bucket = self.get_bucket(bucket_name)
         bucket.keys[key_name] = FakeDeleteMarker(key=bucket.keys[key_name])
 
-    def delete_key(self, bucket_name, key_name, version_id=None):
+    def delete_object(self, bucket_name, key_name, version_id=None):
         key_name = clean_key_name(key_name)
         bucket = self.get_bucket(bucket_name)
@@ -1606,7 +1606,7 @@ class S3Backend(BaseBackend):
         src_key_name = clean_key_name(src_key_name)
         dest_key_name = clean_key_name(dest_key_name)
         dest_bucket = self.get_bucket(dest_bucket_name)
-        key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id)
+        key = self.get_object(src_bucket_name, src_key_name, version_id=src_version_id)
         new_key = key.copy(dest_key_name, dest_bucket.is_versioned)
         self.tagger.copy_tags(key.arn, new_key.arn)
@@ -1626,5 +1626,17 @@ class S3Backend(BaseBackend):
         bucket = self.get_bucket(bucket_name)
         return bucket.acl
 
+    def get_bucket_cors(self, bucket_name):
+        bucket = self.get_bucket(bucket_name)
+        return bucket.cors
+
+    def get_bucket_logging(self, bucket_name):
+        bucket = self.get_bucket(bucket_name)
+        return bucket.logging
+
+    def get_bucket_notification_configuration(self, bucket_name):
+        bucket = self.get_bucket(bucket_name)
+        return bucket.notification_configuration
+
 
 s3_backend = S3Backend()
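
The renamed S3Backend methods above can be exercised directly against the in-memory backend. A minimal sketch, assuming the backend is importable from moto.s3.models as in the upstream package layout (the bucket and key names are purely illustrative):

    from moto.s3.models import S3Backend

    backend = S3Backend()
    backend.create_bucket("example-bucket", "us-east-1")
    # set_object / get_object / delete_object replace the old *_key names
    backend.set_object("example-bucket", "greeting.txt", b"hello world")
    key = backend.get_object("example-bucket", "greeting.txt")
    print(key.value)  # expected: b'hello world'
    backend.delete_object("example-bucket", "greeting.txt")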


@@ -382,7 +382,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             template = self.response_template(S3_OBJECT_ACL_RESPONSE)
             return template.render(obj=bucket)
         elif "tagging" in querystring:
-            tags = self.backend.get_bucket_tags(bucket_name)["Tags"]
+            tags = self.backend.get_bucket_tagging(bucket_name)["Tags"]
             # "Special Error" if no tags:
             if len(tags) == 0:
                 template = self.response_template(S3_NO_BUCKET_TAGGING)
@@ -390,25 +390,27 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             template = self.response_template(S3_OBJECT_TAGGING_RESPONSE)
             return template.render(tags=tags)
         elif "logging" in querystring:
-            bucket = self.backend.get_bucket(bucket_name)
-            if not bucket.logging:
+            logging = self.backend.get_bucket_logging(bucket_name)
+            if not logging:
                 template = self.response_template(S3_NO_LOGGING_CONFIG)
                 return 200, {}, template.render()
             template = self.response_template(S3_LOGGING_CONFIG)
-            return 200, {}, template.render(logging=bucket.logging)
+            return 200, {}, template.render(logging=logging)
         elif "cors" in querystring:
-            bucket = self.backend.get_bucket(bucket_name)
-            if len(bucket.cors) == 0:
+            cors = self.backend.get_bucket_cors(bucket_name)
+            if len(cors) == 0:
                 template = self.response_template(S3_NO_CORS_CONFIG)
                 return 404, {}, template.render(bucket_name=bucket_name)
             template = self.response_template(S3_BUCKET_CORS_RESPONSE)
-            return template.render(bucket=bucket)
+            return template.render(cors=cors)
         elif "notification" in querystring:
-            bucket = self.backend.get_bucket(bucket_name)
-            if not bucket.notification_configuration:
+            notification_configuration = self.backend.get_bucket_notification_configuration(
+                bucket_name
+            )
+            if not notification_configuration:
                 return 200, {}, ""
             template = self.response_template(S3_GET_BUCKET_NOTIFICATION_CONFIG)
-            return template.render(bucket=bucket)
+            return template.render(config=notification_configuration)
         elif "accelerate" in querystring:
             bucket = self.backend.get_bucket(bucket_name)
             if bucket.accelerate_configuration is None:
@@ -663,7 +665,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             return ""
         elif "tagging" in querystring:
             tagging = self._bucket_tagging_from_xml(body)
-            self.backend.put_bucket_tags(bucket_name, tagging)
+            self.backend.put_bucket_tagging(bucket_name, tagging)
             return ""
         elif "website" in querystring:
             self.backend.set_bucket_website_configuration(bucket_name, body)
@@ -840,7 +842,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         else:
             status_code = 204
 
-        new_key = self.backend.set_key(bucket_name, key, f)
+        new_key = self.backend.set_object(bucket_name, key, f)
 
         # Metadata
         metadata = metadata_from_headers(form)
@@ -879,7 +881,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             key_name = object_["Key"]
             version_id = object_.get("VersionId", None)
 
-            success = self.backend.delete_key(
+            success = self.backend.delete_object(
                 bucket_name, undo_clean_key_name(key_name), version_id=version_id
             )
             if success:
@@ -1056,7 +1058,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             signed_url = "Signature=" in request.url
         elif hasattr(request, "requestline"):
             signed_url = "Signature=" in request.path
-        key = self.backend.get_key(bucket_name, key_name)
+        key = self.backend.get_object(bucket_name, key_name)
 
         if key:
             if not key.acl.public_read and not signed_url:
@@ -1118,7 +1120,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         )
         version_id = query.get("versionId", [None])[0]
         if_modified_since = headers.get("If-Modified-Since", None)
-        key = self.backend.get_key(bucket_name, key_name, version_id=version_id)
+        key = self.backend.get_object(bucket_name, key_name, version_id=version_id)
         if key is None:
             raise MissingKey(key_name)
         if if_modified_since:
@@ -1164,7 +1166,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             except ValueError:
                 start_byte, end_byte = None, None
 
-            if self.backend.get_key(src_bucket, src_key, version_id=src_version_id):
+            if self.backend.get_object(
+                src_bucket, src_key, version_id=src_version_id
+            ):
                 key = self.backend.copy_part(
                     bucket_name,
                     upload_id,
@@ -1193,7 +1197,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         tagging = self._tagging_from_headers(request.headers)
 
         if "acl" in query:
-            key = self.backend.get_key(bucket_name, key_name)
+            key = self.backend.get_object(bucket_name, key_name)
             # TODO: Support the XML-based ACL format
             key.set_acl(acl)
             return 200, response_headers, ""
@@ -1203,7 +1207,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
                 version_id = query["versionId"][0]
             else:
                 version_id = None
-            key = self.backend.get_key(bucket_name, key_name, version_id=version_id)
+            key = self.backend.get_object(bucket_name, key_name, version_id=version_id)
             tagging = self._tagging_from_xml(body)
             self.backend.set_key_tags(key, tagging, key_name)
             return 200, response_headers, ""
@@ -1221,7 +1225,9 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             )
             src_version_id = parse_qs(src_key_parsed.query).get("versionId", [None])[0]
 
-            key = self.backend.get_key(src_bucket, src_key, version_id=src_version_id)
+            key = self.backend.get_object(
+                src_bucket, src_key, version_id=src_version_id
+            )
 
             if key is not None:
                 if key.storage_class in ["GLACIER", "DEEP_ARCHIVE"]:
@@ -1238,7 +1244,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             else:
                 return 404, response_headers, ""
 
-            new_key = self.backend.get_key(bucket_name, key_name)
+            new_key = self.backend.get_object(bucket_name, key_name)
             mdirective = request.headers.get("x-amz-metadata-directive")
             if mdirective is not None and mdirective == "REPLACE":
                 metadata = metadata_from_headers(request.headers)
@@ -1254,13 +1260,13 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         closing_connection = headers.get("connection") == "close"
         if closing_connection and streaming_request:
             # Closing the connection of a streaming request. No more data
-            new_key = self.backend.get_key(bucket_name, key_name)
+            new_key = self.backend.get_object(bucket_name, key_name)
         elif streaming_request:
             # Streaming request, more data
             new_key = self.backend.append_to_key(bucket_name, key_name, body)
         else:
             # Initial data
-            new_key = self.backend.set_key(
+            new_key = self.backend.set_object(
                 bucket_name, key_name, body, storage=storage_class
             )
             request.streaming = True
@@ -1286,7 +1292,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         if if_modified_since:
             if_modified_since = str_to_rfc_1123_datetime(if_modified_since)
 
-        key = self.backend.get_key(
+        key = self.backend.get_object(
             bucket_name, key_name, version_id=version_id, part_number=part_number
         )
         if key:
@@ -1596,7 +1602,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
             self.backend.cancel_multipart(bucket_name, upload_id)
             return 204, {}, ""
         version_id = query.get("versionId", [None])[0]
-        self.backend.delete_key(bucket_name, key_name, version_id=version_id)
+        self.backend.delete_object(bucket_name, key_name, version_id=version_id)
         return 204, {}, ""
 
     def _complete_multipart_body(self, body):
@@ -1633,7 +1639,7 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
         elif "restore" in query:
             es = minidom.parseString(body).getElementsByTagName("Days")
             days = es[0].childNodes[0].wholeText
-            key = self.backend.get_key(bucket_name, key_name)
+            key = self.backend.get_object(bucket_name, key_name)
             r = 202
             if key.expiry_date is not None:
                 r = 200
@@ -1959,7 +1965,7 @@ S3_OBJECT_TAGGING_RESPONSE = """\
 S3_BUCKET_CORS_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
 <CORSConfiguration>
-  {% for cors in bucket.cors %}
+  {% for cors in cors %}
   <CORSRule>
     {% for origin in cors.allowed_origins %}
     <AllowedOrigin>{{ origin }}</AllowedOrigin>
@@ -2192,7 +2198,7 @@ S3_NO_ENCRYPTION = """<?xml version="1.0" encoding="UTF-8"?>
 S3_GET_BUCKET_NOTIFICATION_CONFIG = """<?xml version="1.0" encoding="UTF-8"?>
 <NotificationConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
-  {% for topic in bucket.notification_configuration.topic %}
+  {% for topic in config.topic %}
   <TopicConfiguration>
     <Id>{{ topic.id }}</Id>
     <Topic>{{ topic.arn }}</Topic>
@@ -2213,7 +2219,7 @@ S3_GET_BUCKET_NOTIFICATION_CONFIG = """<?xml version="1.0" encoding="UTF-8"?>
     {% endif %}
   </TopicConfiguration>
   {% endfor %}
-  {% for queue in bucket.notification_configuration.queue %}
+  {% for queue in config.queue %}
   <QueueConfiguration>
     <Id>{{ queue.id }}</Id>
     <Queue>{{ queue.arn }}</Queue>
@@ -2234,7 +2240,7 @@ S3_GET_BUCKET_NOTIFICATION_CONFIG = """<?xml version="1.0" encoding="UTF-8"?>
     {% endif %}
   </QueueConfiguration>
   {% endfor %}
-  {% for cf in bucket.notification_configuration.cloud_function %}
+  {% for cf in config.cloud_function %}
   <CloudFunctionConfiguration>
     <Id>{{ cf.id }}</Id>
     <CloudFunction>{{ cf.arn }}</CloudFunction>


@@ -4364,7 +4364,7 @@ def test_s3_config_dict():
     # With 1 bucket in us-west-2:
     s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")
 
-    s3_config_query.backends["global"].put_bucket_tags("bucket1", tags)
+    s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags)
 
     # With a log bucket:
     s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")
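
Continuing the sketch above, a hypothetical round trip over the renamed tagging methods; the dict argument and the "Tags" key follow the put_bucket_tagging/get_bucket_tagging usage in this diff, while the bucket name and tag values are illustrative:

    backend = S3Backend()
    backend.create_bucket("tagged-bucket", "us-west-2")
    # put_bucket_tagging expects a plain dict of tag key/value pairs
    backend.put_bucket_tagging("tagged-bucket", {"team": "platform"})
    tags = backend.get_bucket_tagging("tagged-bucket")["Tags"]
    print(tags)  # expected shape: [{"Key": "team", "Value": "platform"}]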