Fixing S3 copy_key error when using unicode.
commit 6897a118ed
parent 1095b7d94b
@@ -19,7 +19,8 @@ from .exceptions import BucketAlreadyExists, S3ClientError, MissingBucket, Missi
     MalformedACLError, InvalidNotificationARN, InvalidNotificationEvent
 from .models import s3_backend, get_canned_acl, FakeGrantee, FakeGrant, FakeAcl, FakeKey, FakeTagging, FakeTagSet, \
     FakeTag
-from .utils import bucket_name_from_url, metadata_from_headers, parse_region_from_url
+from .utils import bucket_name_from_url, clean_key_name, metadata_from_headers, \
+    parse_region_from_url
 from xml.dom import minidom
 
 
@@ -733,7 +734,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
             # Copy key
             # you can have a quoted ?version=abc with a version Id, so we need
             # to parse the unquoted string first
-            src_key = request.headers.get("x-amz-copy-source")
+            src_key = clean_key_name(request.headers.get("x-amz-copy-source"))
             if isinstance(src_key, six.binary_type):
                 src_key = src_key.decode('utf-8')
             src_key_parsed = urlparse(src_key)
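For context on the change above: the x-amz-copy-source header reaches the mock backend URL-encoded, so a key name containing non-ASCII characters has to be unquoted before the bucket/key split. A minimal sketch of that idea, assuming clean_key_name simply URL-unquotes its argument (the helper itself is not shown in this diff) and using a made-up header value:

from urllib.parse import unquote, urlparse

def clean_key_name(key_name):
    # Assumed behaviour of the helper added to the import above:
    # undo the URL-encoding the client applies to the copy-source header.
    return unquote(key_name)

# Hypothetical header value for copying "the-unicode-💩-key" out of "foobar".
raw_header = "foobar/the-unicode-%F0%9F%92%A9-key"

src_key = clean_key_name(raw_header)
src_key_parsed = urlparse(src_key)
print(src_key_parsed.path)  # foobar/the-unicode-💩-key

Without the unquote step, src_key keeps the percent-encoded name and the backend looks up a key that was never stored, which is presumably the failure the new test below reproduces.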
@@ -418,6 +418,22 @@ def test_copy_key():
         "new-key").get_contents_as_string().should.equal(b"some value")
 
 
+@mock_s3_deprecated
+def test_copy_key_with_unicode():
+    conn = boto.connect_s3('the_key', 'the_secret')
+    bucket = conn.create_bucket("foobar")
+    key = Key(bucket)
+    key.key = "the-unicode-💩-key"
+    key.set_contents_from_string("some value")
+
+    bucket.copy_key('new-key', 'foobar', 'the-unicode-💩-key')
+
+    bucket.get_key(
+        "the-unicode-💩-key").get_contents_as_string().should.equal(b"some value")
+    bucket.get_key(
+        "new-key").get_contents_as_string().should.equal(b"some value")
+
+
 @mock_s3_deprecated
 def test_copy_key_with_version():
     conn = boto.connect_s3('the_key', 'the_secret')
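The regression test exercises the fix through the legacy boto interface that mock_s3_deprecated covers. For readers on the newer client, a rough boto3 equivalent of the same scenario might look like the following sketch (hypothetical, not part of this commit):

import boto3
from moto import mock_s3

@mock_s3
def copy_unicode_key():
    # Hypothetical boto3 counterpart to test_copy_key_with_unicode above.
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="foobar")
    s3.put_object(Bucket="foobar", Key="the-unicode-💩-key", Body=b"some value")
    s3.copy_object(
        Bucket="foobar",
        Key="new-key",
        CopySource={"Bucket": "foobar", "Key": "the-unicode-💩-key"},
    )
    body = s3.get_object(Bucket="foobar", Key="new-key")["Body"].read()
    assert body == b"some value"

copy_unicode_key()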