moto/tests/test_s3/test_s3.py


# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError
from functools import wraps
from io import BytesIO
import json

import boto
import boto3
from botocore.client import ClientError
from boto.exception import S3CreateError, S3ResponseError
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from freezegun import freeze_time
import requests
import tests.backport_assert_raises  # noqa
from nose.tools import assert_raises

import sure  # noqa

from moto import mock_s3


REDUCED_PART_SIZE = 256


def reduced_min_part_size(f):
    """Speed up tests by temporarily making the multipart minimum part size
    small (S3 normally refuses parts under 5 MB, except for the last one).
    """
    import moto.s3.models as s3model
    orig_size = s3model.UPLOAD_PART_MIN_SIZE

    @wraps(f)
    def wrapped(*args, **kwargs):
        try:
            s3model.UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE
            return f(*args, **kwargs)
        finally:
            s3model.UPLOAD_PART_MIN_SIZE = orig_size
    return wrapped


class MyModel(object):
    def __init__(self, name, value):
        self.name = name
        self.value = value

    def save(self):
        conn = boto.connect_s3('the_key', 'the_secret')
        bucket = conn.get_bucket('mybucket')
        k = Key(bucket)
        k.key = self.name
        k.set_contents_from_string(self.value)


@mock_s3
def test_my_model_save():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.create_bucket('mybucket')
    ####################################
    model_instance = MyModel('steve', 'is awesome')
    model_instance.save()

    conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal(b'is awesome')


@mock_s3
def test_key_etag():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.create_bucket('mybucket')
    ####################################
    model_instance = MyModel('steve', 'is awesome')
    model_instance.save()
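
    # For a plain (non-multipart) PUT, S3's ETag is the MD5 hex digest of the
    # body, which is where the constant below comes from.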
    conn.get_bucket('mybucket').get_key('steve').etag.should.equal(
        '"d32bda93738f7e03adb22e66c90fbc04"')


@mock_s3
def test_multipart_upload_too_small():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    multipart.upload_part_from_file(BytesIO(b'hello'), 1)
    multipart.upload_part_from_file(BytesIO(b'world'), 2)
    # A multipart upload with a total size under 5 MB is refused
    multipart.complete_upload.should.throw(S3ResponseError)


@mock_s3
@reduced_min_part_size
def test_multipart_upload():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    # last part, can be less than 5 MB
    part2 = b'1'
    multipart.upload_part_from_file(BytesIO(part2), 2)
    multipart.complete_upload()
    # we should get both parts as the key contents
    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)


@mock_s3
@reduced_min_part_size
def test_multipart_upload_out_of_order():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    # last part, can be less than 5 MB
    part2 = b'1'
    multipart.upload_part_from_file(BytesIO(part2), 4)
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 2)
    multipart.complete_upload()
    # we should get both parts as the key contents
    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)


@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_headers():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key", metadata={"foo": "bar"})
    part1 = b'0' * 10
    multipart.upload_part_from_file(BytesIO(part1), 1)
    multipart.complete_upload()

    key = bucket.get_key("the-key")
    key.metadata.should.equal({"foo": "bar"})


@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_copy_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "original-key"
    key.set_contents_from_string("key_value")

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    multipart.copy_part_from_key("foobar", "original-key", 2)
    multipart.complete_upload()
    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + b"key_value")


@mock_s3
@reduced_min_part_size
def test_multipart_upload_cancel():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    multipart.cancel_upload()
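
    # Cancelled uploads should no longer show up when listing. This leans on
    # get_all_multipart_uploads, which test_list_multiparts below exercises.
    bucket.get_all_multipart_uploads().should.have.length_of(0)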


@mock_s3
@reduced_min_part_size
def test_multipart_etag():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('mybucket')

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    # last part, can be less than 5 MB
    part2 = b'1'
    multipart.upload_part_from_file(BytesIO(part2), 2)
    multipart.complete_upload()
    # the ETag should cover both parts and carry the part-count suffix
    bucket.get_key("the-key").etag.should.equal(
        '"66d1a1a2ed08fd05c137f316af4ff255-2"')
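

# For reference, a minimal sketch of the multipart ETag convention the
# assertion above relies on: the MD5 of the concatenated binary part digests,
# suffixed with the part count. Not called by the tests.
def _multipart_etag_sketch(parts):
    import hashlib
    digests = b''.join(hashlib.md5(p).digest() for p in parts)
    return '"{0}-{1}"'.format(hashlib.md5(digests).hexdigest(), len(parts))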


@mock_s3
@reduced_min_part_size
def test_multipart_invalid_order():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('mybucket')

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * 5242880
    etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag
    # last part, can be less than 5 MB
    part2 = b'1'
    etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag
    xml = "<Part><PartNumber>{0}</PartNumber><ETag>{1}</ETag></Part>"
    xml = xml.format(2, etag2) + xml.format(1, etag1)
    xml = "<CompleteMultipartUpload>{0}</CompleteMultipartUpload>".format(xml)
    bucket.complete_multipart_upload.when.called_with(
        multipart.key_name, multipart.id, xml).should.throw(S3ResponseError)


@mock_s3
@reduced_min_part_size
def test_multipart_duplicate_upload():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    multipart = bucket.initiate_multipart_upload("the-key")
    part1 = b'0' * REDUCED_PART_SIZE
    multipart.upload_part_from_file(BytesIO(part1), 1)
    # same part again
    multipart.upload_part_from_file(BytesIO(part1), 1)
    part2 = b'1' * 1024
    multipart.upload_part_from_file(BytesIO(part2), 2)
    multipart.complete_upload()
    # We should get only one copy of part 1.
    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)


@mock_s3
def test_list_multiparts():
    # Create Bucket so that test can run
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('mybucket')

    multipart1 = bucket.initiate_multipart_upload("one-key")
    multipart2 = bucket.initiate_multipart_upload("two-key")
    uploads = bucket.get_all_multipart_uploads()
    uploads.should.have.length_of(2)
    dict([(u.key_name, u.id) for u in uploads]).should.equal(
        {'one-key': multipart1.id, 'two-key': multipart2.id})
    multipart2.cancel_upload()
    uploads = bucket.get_all_multipart_uploads()
    uploads.should.have.length_of(1)
    uploads[0].key_name.should.equal("one-key")
    multipart1.cancel_upload()
    uploads = bucket.get_all_multipart_uploads()
    uploads.should.be.empty


@mock_s3
def test_key_save_to_missing_bucket():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.get_bucket('mybucket', validate=False)

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string.when.called_with("foobar").should.throw(S3ResponseError)


@mock_s3
def test_missing_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    bucket.get_key("the-key").should.equal(None)


@mock_s3
def test_missing_key_urllib2():
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.create_bucket("foobar")

    urlopen.when.called_with("http://foobar.s3.amazonaws.com/the-key").should.throw(HTTPError)


@mock_s3
def test_empty_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("")

    key = bucket.get_key("the-key")
    key.size.should.equal(0)
    key.get_contents_as_string().should.equal(b'')


@mock_s3
def test_empty_key_set_on_existing_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("foobar")

    key = bucket.get_key("the-key")
    key.size.should.equal(6)
    key.get_contents_as_string().should.equal(b'foobar')

    key.set_contents_from_string("")
    bucket.get_key("the-key").get_contents_as_string().should.equal(b'')


@mock_s3
def test_large_key_save():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("foobar" * 100000)

    bucket.get_key("the-key").get_contents_as_string().should.equal(b'foobar' * 100000)


@mock_s3
def test_copy_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    bucket.copy_key('new-key', 'foobar', 'the-key')

    bucket.get_key("the-key").get_contents_as_string().should.equal(b"some value")
    bucket.get_key("new-key").get_contents_as_string().should.equal(b"some value")


@mock_s3
def test_set_metadata():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = 'the-key'
    key.set_metadata('md', 'Metadatastring')
    key.set_contents_from_string("Testval")

    bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring')


@mock_s3
def test_copy_key_replace_metadata():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_metadata('md', 'Metadatastring')
    key.set_contents_from_string("some value")

    bucket.copy_key('new-key', 'foobar', 'the-key',
                    metadata={'momd': 'Mometadatastring'})

    bucket.get_key("new-key").get_metadata('md').should.be.none
    bucket.get_key("new-key").get_metadata('momd').should.equal('Mometadatastring')


@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_last_modified():
    # See https://github.com/boto/boto/issues/466
    conn = boto.connect_s3()
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    rs = bucket.get_all_keys()
    rs[0].last_modified.should.equal('2012-01-01T12:00:00.000Z')

    bucket.get_key("the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT')


@mock_s3
def test_missing_bucket():
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError)


@mock_s3
def test_bucket_with_dash():
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.get_bucket.when.called_with('mybucket-test').should.throw(S3ResponseError)


@mock_s3
def test_create_existing_bucket():
    "Trying to create a bucket that already exists should raise an Error"
    conn = boto.s3.connect_to_region("us-west-2")
    conn.create_bucket("foobar")
    with assert_raises(S3CreateError):
        conn.create_bucket('foobar')


@mock_s3
def test_create_existing_bucket_in_us_east_1():
    """Trying to create a bucket that already exists in us-east-1 returns the bucket.

    http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
    Your previous request to create the named bucket succeeded and you already
    own it. You get this error in all AWS regions except US Standard,
    us-east-1. In the us-east-1 region you will get 200 OK, but it is a no-op
    (if the bucket exists, Amazon S3 will not do anything).
    """
    conn = boto.s3.connect_to_region("us-east-1")
    conn.create_bucket("foobar")
    bucket = conn.create_bucket("foobar")
    bucket.name.should.equal("foobar")


@mock_s3
def test_other_region():
    conn = S3Connection('key', 'secret', host='s3-website-ap-southeast-2.amazonaws.com')
    conn.create_bucket("foobar")
    list(conn.get_bucket("foobar").get_all_keys()).should.equal([])


@mock_s3
def test_bucket_deletion():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    # Try to delete a bucket that still has keys
    conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)

    bucket.delete_key("the-key")
    conn.delete_bucket("foobar")

    # Get a non-existent bucket
    conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError)

    # Delete a non-existent bucket
    conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)


@mock_s3
def test_get_all_buckets():
    conn = boto.connect_s3('the_key', 'the_secret')
    conn.create_bucket("foobar")
    conn.create_bucket("foobar2")

    buckets = conn.get_all_buckets()
    buckets.should.have.length_of(2)


@mock_s3
def test_post_to_bucket():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    requests.post("https://foobar.s3.amazonaws.com/", {
        'key': 'the-key',
        'file': 'nothing'
    })

    bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing')


@mock_s3
def test_post_with_metadata_to_bucket():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    requests.post("https://foobar.s3.amazonaws.com/", {
        'key': 'the-key',
        'file': 'nothing',
        'x-amz-meta-test': 'metadata'
    })

    bucket.get_key('the-key').get_metadata('test').should.equal('metadata')


@mock_s3
def test_delete_missing_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')

    bucket.delete_key.when.called_with('foobar').should.throw(S3ResponseError)


@mock_s3
def test_delete_keys():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')

    Key(bucket=bucket, name='file1').set_contents_from_string('abc')
    Key(bucket=bucket, name='file2').set_contents_from_string('abc')
    Key(bucket=bucket, name='file3').set_contents_from_string('abc')
    Key(bucket=bucket, name='file4').set_contents_from_string('abc')

    result = bucket.delete_keys(['file2', 'file3'])
    result.deleted.should.have.length_of(2)
    result.errors.should.have.length_of(0)
    keys = bucket.get_all_keys()
    keys.should.have.length_of(2)
    keys[0].name.should.equal('file1')


@mock_s3
def test_delete_keys_with_invalid():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')

    Key(bucket=bucket, name='file1').set_contents_from_string('abc')
    Key(bucket=bucket, name='file2').set_contents_from_string('abc')
    Key(bucket=bucket, name='file3').set_contents_from_string('abc')
    Key(bucket=bucket, name='file4').set_contents_from_string('abc')

    result = bucket.delete_keys(['abc', 'file3'])
    result.deleted.should.have.length_of(1)
    result.errors.should.have.length_of(1)
    keys = bucket.get_all_keys()
    keys.should.have.length_of(3)
    keys[0].name.should.equal('file1')


@mock_s3
def test_bucket_method_not_implemented():
    requests.patch.when.called_with("https://foobar.s3.amazonaws.com/").should.throw(NotImplementedError)


@mock_s3
def test_key_method_not_implemented():
    requests.post.when.called_with("https://foobar.s3.amazonaws.com/foo").should.throw(NotImplementedError)


@mock_s3
def test_bucket_name_with_dot():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('firstname.lastname')

    k = Key(bucket, 'somekey')
    k.set_contents_from_string('somedata')


@mock_s3
def test_key_with_special_characters():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('test_bucket_name')

    key = Key(bucket, 'test_list_keys_2/x?y')
    key.set_contents_from_string('value1')

    key_list = bucket.list('test_list_keys_2/', '/')
    keys = [x for x in key_list]
    keys[0].name.should.equal("test_list_keys_2/x?y")


@mock_s3
def test_bucket_key_listing_order():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('test_bucket')
    prefix = 'toplevel/'

    def store(name):
        k = Key(bucket, prefix + name)
        k.set_contents_from_string('somedata')

    names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key']
    for name in names:
        store(name)

    delimiter = None
    keys = [x.name for x in bucket.list(prefix, delimiter)]
    keys.should.equal([
        'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key',
        'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3'
    ])

    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix, delimiter)]
    keys.should.equal([
        'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/'
    ])

    # Test delimiter with no prefix
    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)]
    keys.should.equal(['toplevel/'])

    delimiter = None
    keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
    keys.should.equal([u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key'])

    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
    keys.should.equal([u'toplevel/x/'])


@mock_s3
def test_key_with_reduced_redundancy():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('test_bucket_name')

    key = Key(bucket, 'test_rr_key')
    key.set_contents_from_string('value1', reduced_redundancy=True)
    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
    list(bucket)[0].storage_class.should.equal('REDUCED_REDUNDANCY')


@mock_s3
def test_copy_key_reduced_redundancy():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    bucket.copy_key('new-key', 'foobar', 'the-key', storage_class='REDUCED_REDUNDANCY')

    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
    keys = dict([(k.name, k) for k in bucket])
    keys['new-key'].storage_class.should.equal("REDUCED_REDUNDANCY")
    keys['the-key'].storage_class.should.equal("STANDARD")


@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_restore_key():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")
    list(bucket)[0].ongoing_restore.should.be.none

    key.restore(1)
    key = bucket.get_key('the-key')
    key.ongoing_restore.should_not.be.none
    key.ongoing_restore.should.be.false
    key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT")

    key.restore(2)
    key = bucket.get_key('the-key')
    key.ongoing_restore.should_not.be.none
    key.ongoing_restore.should.be.false
    key.expiry_date.should.equal("Tue, 03 Jan 2012 12:00:00 GMT")


@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_restore_key_headers():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")
    key.restore(1, headers={'foo': 'bar'})

    key = bucket.get_key('the-key')
    key.ongoing_restore.should_not.be.none
    key.ongoing_restore.should.be.false
    key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT")


@mock_s3
def test_get_versioning_status():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')
    d = bucket.get_versioning_status()
    d.should.be.empty

    bucket.configure_versioning(versioning=True)
    d = bucket.get_versioning_status()
    d.shouldnt.be.empty
    d.should.have.key('Versioning').being.equal('Enabled')

    bucket.configure_versioning(versioning=False)
    d = bucket.get_versioning_status()
    d.should.have.key('Versioning').being.equal('Suspended')


@mock_s3
def test_key_version():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')
    bucket.configure_versioning(versioning=True)

    key = Key(bucket)
    key.key = 'the-key'
    key.version_id.should.be.none
    key.set_contents_from_string('some string')
    key.version_id.should.equal('0')
    key.set_contents_from_string('some string')
    key.version_id.should.equal('1')

    key = bucket.get_key('the-key')
    key.version_id.should.equal('1')


@mock_s3
def test_list_versions():
    conn = boto.connect_s3('the_key', 'the_secret')
    bucket = conn.create_bucket('foobar')
    bucket.configure_versioning(versioning=True)

    key = Key(bucket, 'the-key')
    key.version_id.should.be.none
    key.set_contents_from_string("Version 1")
    key.version_id.should.equal('0')
    key.set_contents_from_string("Version 2")
    key.version_id.should.equal('1')

    versions = list(bucket.list_versions())
    versions.should.have.length_of(2)

    versions[0].name.should.equal('the-key')
    versions[0].version_id.should.equal('0')
    versions[0].get_contents_as_string().should.equal(b"Version 1")

    versions[1].name.should.equal('the-key')
    versions[1].version_id.should.equal('1')
    versions[1].get_contents_as_string().should.equal(b"Version 2")


@mock_s3
def test_acl_is_ignored_for_now():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('foobar')
    content = b'imafile'
    keyname = 'test.txt'

    key = Key(bucket, name=keyname)
    key.content_type = 'text/plain'
    key.set_contents_from_string(content)
    key.make_public()

    key = bucket.get_key(keyname)

    assert key.get_contents_as_string() == content


@mock_s3
def test_unicode_key():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('mybucket')
    key = Key(bucket)
    key.key = u'こんにちは.jpg'
    key.set_contents_from_string('Hello world!')
    list(bucket.list())
    key = bucket.get_key(key.key)
    assert key.get_contents_as_string().decode("utf-8") == 'Hello world!'


@mock_s3
def test_unicode_value():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('mybucket')
    key = Key(bucket)
    key.key = 'some_key'
    key.set_contents_from_string(u'こんにちは.jpg')
    list(bucket.list())
    key = bucket.get_key(key.key)
    assert key.get_contents_as_string().decode("utf-8") == u'こんにちは.jpg'


@mock_s3
def test_setting_content_encoding():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('mybucket')
    key = bucket.new_key("keyname")
    key.set_metadata("Content-Encoding", "gzip")
    compressed_data = "abcdef"
    key.set_contents_from_string(compressed_data)

    key = bucket.get_key("keyname")
    key.content_encoding.should.equal("gzip")


@mock_s3
def test_bucket_location():
    conn = boto.s3.connect_to_region("us-west-2")
    bucket = conn.create_bucket('mybucket')
    bucket.get_location().should.equal("us-west-2")


@mock_s3
def test_ranged_get():
    conn = boto.connect_s3()
    bucket = conn.create_bucket('mybucket')
    key = Key(bucket)
    key.key = 'bigkey'
    rep = b"0123456789"
    key.set_contents_from_string(rep * 10)

    # Implicitly bounded range requests.
    key.get_contents_as_string(headers={'Range': 'bytes=0-'}).should.equal(rep * 10)
    key.get_contents_as_string(headers={'Range': 'bytes=50-'}).should.equal(rep * 5)
    key.get_contents_as_string(headers={'Range': 'bytes=99-'}).should.equal(b'9')

    # Explicitly bounded range requests starting from the first byte.
    key.get_contents_as_string(headers={'Range': 'bytes=0-0'}).should.equal(b'0')
    key.get_contents_as_string(headers={'Range': 'bytes=0-49'}).should.equal(rep * 5)
    key.get_contents_as_string(headers={'Range': 'bytes=0-99'}).should.equal(rep * 10)
    key.get_contents_as_string(headers={'Range': 'bytes=0-100'}).should.equal(rep * 10)
    key.get_contents_as_string(headers={'Range': 'bytes=0-700'}).should.equal(rep * 10)

    # Explicitly bounded range requests starting from a middle byte.
    key.get_contents_as_string(headers={'Range': 'bytes=50-54'}).should.equal(rep[:5])
    key.get_contents_as_string(headers={'Range': 'bytes=50-99'}).should.equal(rep * 5)
    key.get_contents_as_string(headers={'Range': 'bytes=50-100'}).should.equal(rep * 5)
    key.get_contents_as_string(headers={'Range': 'bytes=50-700'}).should.equal(rep * 5)

    # Explicitly bounded range requests starting from the last byte.
    key.get_contents_as_string(headers={'Range': 'bytes=99-99'}).should.equal(b'9')
    key.get_contents_as_string(headers={'Range': 'bytes=99-100'}).should.equal(b'9')
    key.get_contents_as_string(headers={'Range': 'bytes=99-700'}).should.equal(b'9')

    # Suffix range requests.
    key.get_contents_as_string(headers={'Range': 'bytes=-1'}).should.equal(b'9')
    key.get_contents_as_string(headers={'Range': 'bytes=-60'}).should.equal(rep * 6)
    key.get_contents_as_string(headers={'Range': 'bytes=-100'}).should.equal(rep * 10)
    key.get_contents_as_string(headers={'Range': 'bytes=-101'}).should.equal(rep * 10)
    key.get_contents_as_string(headers={'Range': 'bytes=-700'}).should.equal(rep * 10)

    key.size.should.equal(100)


@mock_s3
def test_policy():
    conn = boto.connect_s3()
    bucket_name = 'mybucket'
    bucket = conn.create_bucket(bucket_name)

    policy = json.dumps({
        "Version": "2012-10-17",
        "Id": "PutObjPolicy",
        "Statement": [
            {
                "Sid": "DenyUnEncryptedObjectUploads",
                "Effect": "Deny",
                "Principal": "*",
                "Action": "s3:PutObject",
                "Resource": "arn:aws:s3:::{bucket_name}/*".format(bucket_name=bucket_name),
                "Condition": {
                    "StringNotEquals": {
                        "s3:x-amz-server-side-encryption": "aws:kms"
                    }
                }
            }
        ]
    })

    with assert_raises(S3ResponseError) as err:
        bucket.get_policy()

    ex = err.exception
    ex.box_usage.should.be.none
    ex.error_code.should.equal('NoSuchBucketPolicy')
    ex.message.should.equal('The bucket policy does not exist')
    ex.reason.should.equal('Not Found')
    ex.resource.should.be.none
    ex.status.should.equal(404)
    ex.body.should.contain(bucket_name)
    ex.request_id.should_not.be.none

    bucket.set_policy(policy).should.be.true

    bucket = conn.get_bucket(bucket_name)
    bucket.get_policy().decode('utf-8').should.equal(policy)

    bucket.delete_policy()

    with assert_raises(S3ResponseError) as err:
        bucket.get_policy()


"""
boto3
"""


@mock_s3
def test_boto3_bucket_create():
    s3 = boto3.resource('s3', region_name='us-east-1')
    s3.create_bucket(Bucket="blah")

    s3.Object('blah', 'hello.txt').put(Body="some text")

    s3.Object('blah', 'hello.txt').get()['Body'].read().decode("utf-8").should.equal("some text")


@mock_s3
def test_boto3_head_object():
    s3 = boto3.resource('s3', region_name='us-east-1')
    s3.create_bucket(Bucket="blah")

    s3.Object('blah', 'hello.txt').put(Body="some text")

    s3.Object('blah', 'hello.txt').meta.client.head_object(Bucket='blah', Key='hello.txt')

    with assert_raises(ClientError) as err:
        s3.Object('blah', 'hello2.txt').meta.client.head_object(Bucket='blah', Key='hello_bad.txt')