S3 Fixes for special metadata headers.

Steve Pulec 2014-11-27 10:43:10 -05:00
parent 06a635aeaa
commit e88e3d5372
2 changed files with 24 additions and 2 deletions

View File

@@ -1,9 +1,12 @@
 from __future__ import unicode_literals
+import six
+from six.moves.urllib.parse import parse_qs, urlparse
+
 import re
+
+from boto.s3.key import Key
 from jinja2 import Template
-import six
-from six.moves.urllib.parse import parse_qs, urlparse
 from .exceptions import BucketAlreadyExists, MissingBucket
 from .models import s3_backend
@@ -232,8 +235,14 @@ class ResponseObject(object):
         for header, value in request.headers.items():
             if isinstance(header, six.string_types):
                 result = meta_regex.match(header)
+                meta_key = None
                 if result:
+                    # Check for extra metadata
                     meta_key = result.group(0).lower()
+                elif header.lower() in Key.base_user_settable_fields:
+                    # Check for special metadata that doesn't start with x-amz-meta
+                    meta_key = header
+                if meta_key:
                     metadata = request.headers[header]
                     key.set_metadata(meta_key, metadata)
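
The hunk above shows only the loop body, so here is a self-contained sketch of the header classification it performs. META_REGEX, BASE_USER_SETTABLE_FIELDS and metadata_key_for are hypothetical stand-ins for moto's meta_regex and boto's Key.base_user_settable_fields; the exact regex pattern and field set are assumptions, not taken from this diff.

import re

# Illustrative stand-ins (assumed values, not copied from moto or boto).
META_REGEX = re.compile(r'^x-amz-meta-([a-zA-Z0-9\-_]+)$', flags=re.IGNORECASE)
BASE_USER_SETTABLE_FIELDS = {'content-encoding', 'content-language', 'content-disposition'}

def metadata_key_for(header):
    """Return the metadata key a request header maps to, or None if it is ignored."""
    result = META_REGEX.match(header)
    if result:
        # Regular user metadata: x-amz-meta-<name>
        return result.group(0).lower()
    if header.lower() in BASE_USER_SETTABLE_FIELDS:
        # Special metadata that doesn't start with x-amz-meta
        return header
    return None

print(metadata_key_for('x-amz-meta-color'))   # x-amz-meta-color
print(metadata_key_for('Content-Encoding'))   # Content-Encoding
print(metadata_key_for('Authorization'))      # None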

View File

@@ -641,3 +641,16 @@ def test_unicode_value():
     list(bucket.list())
     key = bucket.get_key(key.key)
     assert key.get_contents_as_string().decode("utf-8") == u'こんにちは.jpg'
+
+
+@mock_s3
+def test_setting_content_encoding():
+    conn = boto.connect_s3()
+    bucket = conn.create_bucket('mybucket')
+    key = bucket.new_key("keyname")
+    key.set_metadata("Content-Encoding", "gzip")
+    compressed_data = "abcdef"
+    key.set_contents_from_string(compressed_data)
+
+    key = bucket.get_key("keyname")
+    key.content_encoding.should.equal("gzip")
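
The new test only covers Content-Encoding; other headers in boto's base_user_settable_fields should flow through the same elif branch. A hypothetical companion test, not part of this commit, might look like the sketch below (it assumes content-disposition is in boto's field set and that moto round-trips it the same way):

import boto
from moto import mock_s3

@mock_s3
def test_setting_content_disposition():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('mybucket')

    key = bucket.new_key("keyname")
    key.set_metadata("Content-Disposition", "attachment; filename=data.bin")
    key.set_contents_from_string("abcdef")

    key = bucket.get_key("keyname")
    assert key.content_disposition == "attachment; filename=data.bin"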