import datetime
import os
from urllib.parse import urlparse, parse_qs
from functools import wraps
from gzip import GzipFile
from io import BytesIO
import zlib
import pickle
import uuid

import json
import boto3
from botocore.client import ClientError
import botocore.exceptions
from botocore.handlers import disable_signing
from freezegun import freeze_time
import requests

from moto.s3.responses import DEFAULT_REGION_NAME
from unittest import SkipTest
import pytest

import sure  # noqa # pylint: disable=unused-import

from moto import settings, mock_s3, mock_config, mock_kms
import moto.s3.models as s3model
from moto.core.exceptions import InvalidNextTokenException
from moto.settings import get_s3_default_key_buffer_size, S3_UPLOAD_PART_MIN_SIZE
from uuid import uuid4

if settings.TEST_SERVER_MODE:
    REDUCED_PART_SIZE = S3_UPLOAD_PART_MIN_SIZE
    EXPECTED_ETAG = '"140f92a6df9f9e415f74a1463bcee9bb-2"'
else:
    REDUCED_PART_SIZE = 256
    EXPECTED_ETAG = '"66d1a1a2ed08fd05c137f316af4ff255-2"'
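

# EXPECTED_ETAG ends in "-2" because S3 computes multipart ETags as the md5 of
# the concatenated binary md5 digests of each part, suffixed with the part
# count. A minimal sketch of that convention (illustrative only; the helper
# name is ours, not moto's, and pytest will not collect it):
def _example_multipart_etag(parts):
    import hashlib

    digests = b"".join(hashlib.md5(part).digest() for part in parts)
    return '"{}-{}"'.format(hashlib.md5(digests).hexdigest(), len(parts))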


def reduced_min_part_size(f):
    """Speed up tests by temporarily making the multipart minimum part size small."""
    orig_size = S3_UPLOAD_PART_MIN_SIZE

    @wraps(f)
    def wrapped(*args, **kwargs):
        try:
            s3model.S3_UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE
            return f(*args, **kwargs)
        finally:
            s3model.S3_UPLOAD_PART_MIN_SIZE = orig_size

    return wrapped
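

# A minimal usage sketch of the decorator, assuming it stacks under @mock_s3
# exactly as the multipart tests below apply it (the bucket/key names here are
# illustrative, not from the suite; pytest will not collect this helper):
@mock_s3
@reduced_min_part_size
def _example_reduced_part_size_upload():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="example-bucket")
    mp = client.create_multipart_upload(Bucket="example-bucket", Key="example-key")
    # A non-final part of REDUCED_PART_SIZE bytes is accepted here, even though
    # real S3 would require at least 5 MB.
    client.upload_part(
        Body=BytesIO(b"0" * REDUCED_PART_SIZE),
        PartNumber=1,
        Bucket="example-bucket",
        Key="example-key",
        UploadId=mp["UploadId"],
    )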


class MyModel(object):
    def __init__(self, name, value, metadata=None):
        self.name = name
        self.value = value
        self.metadata = metadata or {}

    def save(self):
        s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
        s3.put_object(
            Bucket="mybucket", Key=self.name, Body=self.value, Metadata=self.metadata
        )


@mock_s3
def test_keys_are_pickleable():
    """Keys must be pickleable due to boto3 implementation details."""
    key = s3model.FakeKey("name", b"data!")
    assert key.value == b"data!"

    pickled = pickle.dumps(key)
    loaded = pickle.loads(pickled)
    assert loaded.value == key.value


@mock_s3
def test_my_model_save():
    # Create Bucket so that test can run
    conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    conn.create_bucket(Bucket="mybucket")
    ####################################
    model_instance = MyModel("steve", "is awesome")
    model_instance.save()

    body = conn.Object("mybucket", "steve").get()["Body"].read().decode()

    assert body == "is awesome"


@mock_s3
def test_object_metadata():
    """Metadata keys can contain certain special characters like dash and dot."""
    # Create Bucket so that test can run
    conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    conn.create_bucket(Bucket="mybucket")
    ####################################
    metadata = {"meta": "simple", "my-meta": "dash", "meta.data": "namespaced"}
    model_instance = MyModel("steve", "is awesome", metadata=metadata)
    model_instance.save()

    meta = conn.Object("mybucket", "steve").get()["Metadata"]

    assert meta == metadata


@mock_s3
def test_key_etag():
    conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    conn.create_bucket(Bucket="mybucket")

    model_instance = MyModel("steve", "is awesome")
    model_instance.save()

    conn.Bucket("mybucket").Object("steve").e_tag.should.equal(
        '"d32bda93738f7e03adb22e66c90fbc04"'
    )


@mock_s3
def test_multipart_upload_too_small_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
    up1 = client.upload_part(
        Body=BytesIO(b"hello"),
        PartNumber=1,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    up2 = client.upload_part(
        Body=BytesIO(b"world"),
        PartNumber=2,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    # Multipart with total size under 5MB is refused
    with pytest.raises(ClientError) as ex:
        client.complete_multipart_upload(
            Bucket="foobar",
            Key="the-key",
            MultipartUpload={
                "Parts": [
                    {"ETag": up1["ETag"], "PartNumber": 1},
                    {"ETag": up2["ETag"], "PartNumber": 2},
                ]
            },
            UploadId=mp["UploadId"],
        )
    ex.value.response["Error"]["Code"].should.equal("EntityTooSmall")
    ex.value.response["Error"]["Message"].should.equal(
        "Your proposed upload is smaller than the minimum allowed object size."
    )


@mock_s3
@reduced_min_part_size
def test_multipart_upload_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    part1 = b"0" * REDUCED_PART_SIZE
    part2 = b"1"
    mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
    up1 = client.upload_part(
        Body=BytesIO(part1),
        PartNumber=1,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    up2 = client.upload_part(
        Body=BytesIO(part2),
        PartNumber=2,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )

    client.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={
            "Parts": [
                {"ETag": up1["ETag"], "PartNumber": 1},
                {"ETag": up2["ETag"], "PartNumber": 2},
            ]
        },
        UploadId=mp["UploadId"],
    )
    # we should get both parts as the key contents
    response = client.get_object(Bucket="foobar", Key="the-key")
    response["Body"].read().should.equal(part1 + part2)


@mock_s3
@reduced_min_part_size
def test_multipart_upload_out_of_order_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    part1 = b"0" * REDUCED_PART_SIZE
    part2 = b"1"
    mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
    up1 = client.upload_part(
        Body=BytesIO(part1),
        PartNumber=4,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    up2 = client.upload_part(
        Body=BytesIO(part2),
        PartNumber=2,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )

    client.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={
            "Parts": [
                {"ETag": up1["ETag"], "PartNumber": 4},
                {"ETag": up2["ETag"], "PartNumber": 2},
            ]
        },
        UploadId=mp["UploadId"],
    )
    # we should get both parts as the key contents
    response = client.get_object(Bucket="foobar", Key="the-key")
    response["Body"].read().should.equal(part1 + part2)


@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_headers_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    part1 = b"0" * REDUCED_PART_SIZE
    mp = client.create_multipart_upload(
        Bucket="foobar", Key="the-key", Metadata={"meta": "data"}
    )
    up1 = client.upload_part(
        Body=BytesIO(part1),
        PartNumber=1,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )

    client.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={"Parts": [{"ETag": up1["ETag"], "PartNumber": 1}]},
        UploadId=mp["UploadId"],
    )
    # the metadata supplied at upload creation should survive completion
    response = client.get_object(Bucket="foobar", Key="the-key")
    response["Metadata"].should.equal({"meta": "data"})


@pytest.mark.parametrize(
    "original_key_name",
    [
        "original-key",
        "the-unicode-💩-key",
        "key-with?question-mark",
        "key-with%2Fembedded%2Furl%2Fencoding",
    ],
)
@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_copy_key_boto3(original_key_name):
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")
    s3.put_object(Bucket="foobar", Key=original_key_name, Body="key_value")

    mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
    part1 = b"0" * REDUCED_PART_SIZE
    up1 = s3.upload_part(
        Bucket="foobar",
        Key="the-key",
        PartNumber=1,
        UploadId=mpu["UploadId"],
        Body=BytesIO(part1),
    )
    up2 = s3.upload_part_copy(
        Bucket="foobar",
        Key="the-key",
        CopySource={"Bucket": "foobar", "Key": original_key_name},
        CopySourceRange="0-3",
        PartNumber=2,
        UploadId=mpu["UploadId"],
    )

    s3.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={
            "Parts": [
                {"ETag": up1["ETag"], "PartNumber": 1},
                {"ETag": up2["CopyPartResult"]["ETag"], "PartNumber": 2},
            ]
        },
        UploadId=mpu["UploadId"],
    )
    response = s3.get_object(Bucket="foobar", Key="the-key")
    response["Body"].read().should.equal(part1 + b"key_")


@mock_s3
@reduced_min_part_size
def test_multipart_upload_cancel_boto3():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
    part1 = b"0" * REDUCED_PART_SIZE
    s3.upload_part(
        Bucket="foobar",
        Key="the-key",
        PartNumber=1,
        UploadId=mpu["UploadId"],
        Body=BytesIO(part1),
    )

    uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
    uploads.should.have.length_of(1)
    uploads[0]["Key"].should.equal("the-key")

    s3.abort_multipart_upload(Bucket="foobar", Key="the-key", UploadId=mpu["UploadId"])

    s3.list_multipart_uploads(Bucket="foobar").shouldnt.have.key("Uploads")


@mock_s3
@reduced_min_part_size
def test_multipart_etag_quotes_stripped_boto3():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")
    s3.put_object(Bucket="foobar", Key="original-key", Body="key_value")

    mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
    part1 = b"0" * REDUCED_PART_SIZE
    up1 = s3.upload_part(
        Bucket="foobar",
        Key="the-key",
        PartNumber=1,
        UploadId=mpu["UploadId"],
        Body=BytesIO(part1),
    )
    etag1 = up1["ETag"].replace('"', "")
    up2 = s3.upload_part_copy(
        Bucket="foobar",
        Key="the-key",
        CopySource={"Bucket": "foobar", "Key": "original-key"},
        CopySourceRange="0-3",
        PartNumber=2,
        UploadId=mpu["UploadId"],
    )
    etag2 = up2["CopyPartResult"]["ETag"].replace('"', "")

    s3.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={
            "Parts": [
                {"ETag": etag1, "PartNumber": 1},
                {"ETag": etag2, "PartNumber": 2},
            ]
        },
        UploadId=mpu["UploadId"],
    )
    response = s3.get_object(Bucket="foobar", Key="the-key")
    response["Body"].read().should.equal(part1 + b"key_")


@mock_s3
@reduced_min_part_size
def test_multipart_duplicate_upload_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    part1 = b"0" * REDUCED_PART_SIZE
    part2 = b"1"
    mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
    client.upload_part(
        Body=BytesIO(part1),
        PartNumber=1,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    # same part again
    up1 = client.upload_part(
        Body=BytesIO(part1),
        PartNumber=1,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )
    up2 = client.upload_part(
        Body=BytesIO(part2),
        PartNumber=2,
        Bucket="foobar",
        Key="the-key",
        UploadId=mp["UploadId"],
    )

    client.complete_multipart_upload(
        Bucket="foobar",
        Key="the-key",
        MultipartUpload={
            "Parts": [
                {"ETag": up1["ETag"], "PartNumber": 1},
                {"ETag": up2["ETag"], "PartNumber": 2},
            ]
        },
        UploadId=mp["UploadId"],
    )
    # we should get both parts as the key contents
    response = client.get_object(Bucket="foobar", Key="the-key")
    response["Body"].read().should.equal(part1 + part2)


@mock_s3
def test_list_multiparts_boto3():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    mpu1 = s3.create_multipart_upload(Bucket="foobar", Key="one-key")
    mpu2 = s3.create_multipart_upload(Bucket="foobar", Key="two-key")

    uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
    uploads.should.have.length_of(2)
    {u["Key"]: u["UploadId"] for u in uploads}.should.equal(
        {"one-key": mpu1["UploadId"], "two-key": mpu2["UploadId"]}
    )

    s3.abort_multipart_upload(Bucket="foobar", Key="two-key", UploadId=mpu2["UploadId"])

    uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
    uploads.should.have.length_of(1)
    uploads[0]["Key"].should.equal("one-key")

    s3.abort_multipart_upload(Bucket="foobar", Key="one-key", UploadId=mpu1["UploadId"])

    res = s3.list_multipart_uploads(Bucket="foobar")
    res.shouldnt.have.key("Uploads")


@mock_s3
def test_key_save_to_missing_bucket_boto3():
    s3 = boto3.resource("s3")
    key = s3.Object("mybucket", "the-key")

    with pytest.raises(ClientError) as ex:
        key.put(Body=b"foobar")
    ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
    ex.value.response["Error"]["Message"].should.equal(
        "The specified bucket does not exist"
    )


@mock_s3
def test_missing_key_request_boto3():
    if settings.TEST_SERVER_MODE:
        raise SkipTest("Only test status code in non-ServerMode")
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    response = requests.get("http://foobar.s3.amazonaws.com/the-key")
    response.status_code.should.equal(404)


@mock_s3
def test_empty_key_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"")

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp.should.have.key("ContentLength").equal(0)
    resp["Body"].read().should.equal(b"")


@mock_s3
def test_empty_key_set_on_existing_key_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"some content")

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp.should.have.key("ContentLength").equal(12)
    resp["Body"].read().should.equal(b"some content")

    key.put(Body=b"")

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp.should.have.key("ContentLength").equal(0)
    resp["Body"].read().should.equal(b"")


@mock_s3
def test_large_key_save_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"foobar" * 100000)

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Body"].read().should.equal(b"foobar" * 100000)


@pytest.mark.parametrize(
    "key_name",
    [
        "the-key",
        "the-unicode-💩-key",
        "key-with?question-mark",
        "key-with%2Fembedded%2Furl%2Fencoding",
    ],
)
@mock_s3
def test_copy_key_boto3(key_name):
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", key_name)
    key.put(Body=b"some value")

    key2 = s3.Object("foobar", "new-key")
    key2.copy_from(CopySource="foobar/{}".format(key_name))

    resp = client.get_object(Bucket="foobar", Key=key_name)
    resp["Body"].read().should.equal(b"some value")
    resp = client.get_object(Bucket="foobar", Key="new-key")
    resp["Body"].read().should.equal(b"some value")


@mock_s3
def test_copy_key_with_version_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")
    client.put_bucket_versioning(
        Bucket="foobar", VersioningConfiguration={"Status": "Enabled"}
    )

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"some value")
    key.put(Body=b"another value")

    all_versions = client.list_object_versions(Bucket="foobar", Prefix="the-key")[
        "Versions"
    ]
    old_version = [v for v in all_versions if not v["IsLatest"]][0]

    key2 = s3.Object("foobar", "new-key")
    key2.copy_from(
        CopySource="foobar/the-key?versionId={}".format(old_version["VersionId"])
    )

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Body"].read().should.equal(b"another value")
    resp = client.get_object(Bucket="foobar", Key="new-key")
    resp["Body"].read().should.equal(b"some value")


@mock_s3
def test_copy_object_with_bucketkeyenabled_returns_the_value():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "test-copy-object-with-bucketkeyenabled"
    s3.create_bucket(Bucket=bucket_name)

    key = s3.Object(bucket_name, "the-key")
    key.put(Body=b"some value")

    key2 = s3.Object(bucket_name, "new-key")
    key2.copy_from(
        CopySource=f"{bucket_name}/the-key",
        BucketKeyEnabled=True,
        ServerSideEncryption="aws:kms",
    )

    resp = client.get_object(Bucket=bucket_name, Key="the-key")
    src_headers = resp["ResponseMetadata"]["HTTPHeaders"]
    src_headers.shouldnt.have.key("x-amz-server-side-encryption")
    src_headers.shouldnt.have.key("x-amz-server-side-encryption-aws-kms-key-id")
    src_headers.shouldnt.have.key("x-amz-server-side-encryption-bucket-key-enabled")

    resp = client.get_object(Bucket=bucket_name, Key="new-key")
    target_headers = resp["ResponseMetadata"]["HTTPHeaders"]
    target_headers.should.have.key("x-amz-server-side-encryption")
    # AWS will also return the KMS default key id - not yet implemented
    # target_headers.should.have.key("x-amz-server-side-encryption-aws-kms-key-id")
    # This field is only returned if encryption is set to 'aws:kms'
    target_headers.should.have.key("x-amz-server-side-encryption-bucket-key-enabled")
    str(
        target_headers["x-amz-server-side-encryption-bucket-key-enabled"]
    ).lower().should.equal("true")


@mock_s3
def test_set_metadata_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"some value", Metadata={"md": "Metadatastring"})

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Metadata"].should.equal({"md": "Metadatastring"})


@mock_s3
def test_copy_key_with_metadata_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    metadata = {"md": "Metadatastring"}
    content_type = "application/json"
    initial = key.put(Body=b"{}", Metadata=metadata, ContentType=content_type)

    client.copy_object(Bucket="foobar", CopySource="foobar/the-key", Key="new-key")

    resp = client.get_object(Bucket="foobar", Key="new-key")
    resp["Metadata"].should.equal(metadata)
    resp["ContentType"].should.equal(content_type)
    resp["ETag"].should.equal(initial["ETag"])


@mock_s3
def test_copy_key_replace_metadata_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    initial = key.put(Body=b"some value", Metadata={"md": "Metadatastring"})

    client.copy_object(
        Bucket="foobar",
        CopySource="foobar/the-key",
        Key="new-key",
        Metadata={"momd": "Mometadatastring"},
        MetadataDirective="REPLACE",
    )

    resp = client.get_object(Bucket="foobar", Key="new-key")
    resp["Metadata"].should.equal({"momd": "Mometadatastring"})
    resp["ETag"].should.equal(initial["ETag"])


@mock_s3
def test_copy_key_without_changes_should_error_boto3():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "my_bucket"
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    key_name = "my_key"
    key = s3.Object(bucket_name, key_name)

    s3.create_bucket(Bucket=bucket_name)
    key.put(Body=b"some value")

    with pytest.raises(ClientError) as e:
        client.copy_object(
            Bucket=bucket_name,
            CopySource="{}/{}".format(bucket_name, key_name),
            Key=key_name,
        )
    e.value.response["Error"]["Message"].should.equal(
        "This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes."
    )


@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_last_modified_boto3():
    # See https://github.com/boto/boto/issues/466
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"some value", Metadata={"md": "Metadatastring"})

    rs = client.list_objects_v2(Bucket="foobar")["Contents"]
    rs[0]["LastModified"].should.be.a(datetime.datetime)

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["LastModified"].should.be.a(datetime.datetime)
    as_header = resp["ResponseMetadata"]["HTTPHeaders"]["last-modified"]
    as_header.should.be.a(str)
    if not settings.TEST_SERVER_MODE:
        as_header.should.equal("Sun, 01 Jan 2012 12:00:00 GMT")


@mock_s3
def test_missing_bucket_boto3():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

    with pytest.raises(ClientError) as ex:
        client.head_bucket(Bucket="mybucket")
    ex.value.response["Error"]["Code"].should.equal("404")
    ex.value.response["Error"]["Message"].should.equal("Not Found")

    with pytest.raises(ClientError) as ex:
        client.head_bucket(Bucket="dash-in-name")
    ex.value.response["Error"]["Code"].should.equal("404")
    ex.value.response["Error"]["Message"].should.equal("Not Found")


@mock_s3
def test_create_existing_bucket_boto3():
    "Trying to create a bucket that already exists should raise an Error"
    client = boto3.client("s3", region_name="us-west-2")
    kwargs = {
        "Bucket": "foobar",
        "CreateBucketConfiguration": {"LocationConstraint": "us-west-2"},
    }
    client.create_bucket(**kwargs)

    with pytest.raises(ClientError) as ex:
        client.create_bucket(**kwargs)
    ex.value.response["Error"]["Code"].should.equal("BucketAlreadyOwnedByYou")
    ex.value.response["Error"]["Message"].should.equal(
        "Your previous request to create the named bucket succeeded and you already own it."
    )


@mock_s3
def test_create_existing_bucket_in_us_east_1_boto3():
    "Trying to create a bucket that already exists in us-east-1 returns the bucket"
    """
    http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
    Your previous request to create the named bucket succeeded and you already
    own it. You get this error in all AWS regions except US Standard,
    us-east-1. In the us-east-1 region you will get 200 OK, but it is a no-op
    (if the bucket exists, Amazon S3 will not do anything).
    """
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="foobar")
    client.create_bucket(Bucket="foobar")


@mock_s3
def test_bucket_deletion_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="foobar")

    key = s3.Object("foobar", "the-key")
    key.put(Body=b"some value")

    # Try to delete a bucket that still has keys
    with pytest.raises(ClientError) as ex:
        client.delete_bucket(Bucket="foobar")
    ex.value.response["Error"]["Code"].should.equal("BucketNotEmpty")
    ex.value.response["Error"]["Message"].should.equal(
        "The bucket you tried to delete is not empty"
    )

    client.delete_object(Bucket="foobar", Key="the-key")
    client.delete_bucket(Bucket="foobar")

    # Get non-existing bucket details
    with pytest.raises(ClientError) as ex:
        client.get_bucket_tagging(Bucket="foobar")
    ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
    ex.value.response["Error"]["Message"].should.equal(
        "The specified bucket does not exist"
    )

    # Delete non-existent bucket
    with pytest.raises(ClientError) as ex:
        client.delete_bucket(Bucket="foobar")
    ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
    ex.value.response["Error"]["Message"].should.equal(
        "The specified bucket does not exist"
    )


@mock_s3
def test_get_all_buckets_boto3():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="foobar")
    client.create_bucket(Bucket="foobar2")

    client.list_buckets()["Buckets"].should.have.length_of(2)


@mock_s3
def test_post_to_bucket_boto3():
    if settings.TEST_SERVER_MODE:
        # ServerMode does not allow unauthorized requests
        raise SkipTest()

    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="foobar")

    requests.post(
        "https://foobar.s3.amazonaws.com/", {"key": "the-key", "file": "nothing"}
    )

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Body"].read().should.equal(b"nothing")


@mock_s3
def test_post_with_metadata_to_bucket_boto3():
    if settings.TEST_SERVER_MODE:
        # ServerMode does not allow unauthorized requests
        raise SkipTest()

    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="foobar")

    requests.post(
        "https://foobar.s3.amazonaws.com/",
        {"key": "the-key", "file": "nothing", "x-amz-meta-test": "metadata"},
    )

    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Metadata"].should.equal({"test": "metadata"})


@mock_s3
def test_delete_versioned_objects():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = "test"
    key = "test"

    s3.create_bucket(Bucket=bucket)
    s3.put_object(Bucket=bucket, Key=key, Body=b"")
    s3.put_bucket_versioning(
        Bucket=bucket, VersioningConfiguration={"Status": "Enabled"}
    )

    objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
    versions = s3.list_object_versions(Bucket=bucket).get("Versions")
    delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")

    objects.shouldnt.be.empty
    versions.shouldnt.be.empty
    delete_markers.should.be.none

    s3.delete_object(Bucket=bucket, Key=key)

    objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
    versions = s3.list_object_versions(Bucket=bucket).get("Versions")
    delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")

    objects.should.be.none
    versions.shouldnt.be.empty
    delete_markers.shouldnt.be.empty

    s3.delete_object(Bucket=bucket, Key=key, VersionId=versions[0].get("VersionId"))

    objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
    versions = s3.list_object_versions(Bucket=bucket).get("Versions")
    delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")

    objects.should.be.none
    versions.should.be.none
    delete_markers.shouldnt.be.empty

    s3.delete_object(
        Bucket=bucket, Key=key, VersionId=delete_markers[0].get("VersionId")
    )

    objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
    versions = s3.list_object_versions(Bucket=bucket).get("Versions")
    delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")

    objects.should.be.none
    versions.should.be.none
    delete_markers.should.be.none


@mock_s3
def test_delete_missing_key_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    s3.Object("foobar", "key1").put(Body=b"some value")
    s3.Object("foobar", "key2").put(Body=b"some value")
    s3.Object("foobar", "key3").put(Body=b"some value")
    s3.Object("foobar", "key4").put(Body=b"some value")

    result = bucket.delete_objects(
        Delete={
            "Objects": [
                {"Key": "unknown"},
                {"Key": "key1"},
                {"Key": "key3"},
                {"Key": "typo"},
            ]
        }
    )
    result.should.have.key("Deleted").equal(
        [{"Key": "unknown"}, {"Key": "key1"}, {"Key": "key3"}, {"Key": "typo"}]
    )
    result.shouldnt.have.key("Errors")

    objects = list(bucket.objects.all())
    set([o.key for o in objects]).should.equal(set(["key2", "key4"]))


@mock_s3
def test_boto3_delete_empty_keys_list():
    with pytest.raises(ClientError) as err:
        boto3.client("s3").delete_objects(Bucket="foobar", Delete={"Objects": []})
    assert err.value.response["Error"]["Code"] == "MalformedXML"


@pytest.mark.parametrize("name", ["firstname.lastname", "with-dash"])
@mock_s3
def test_bucket_name_with_special_chars_boto3(name):
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket(name)
    bucket.create()

    s3.Object(name, "the-key").put(Body=b"some value")

    resp = client.get_object(Bucket=name, Key="the-key")
    resp["Body"].read().should.equal(b"some value")


@pytest.mark.parametrize(
    "key", ["normal", "test_list_keys_2/x?y", "/the-key-unîcode/test"]
)
@mock_s3
def test_key_with_special_characters_boto3(key):
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("testname")
    bucket.create()

    s3.Object("testname", key).put(Body=b"value")

    objects = list(bucket.objects.all())
    [o.key for o in objects].should.equal([key])

    resp = client.get_object(Bucket="testname", Key=key)
    resp["Body"].read().should.equal(b"value")


@mock_s3
def test_bucket_key_listing_order_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "test_bucket"
    bucket = s3.Bucket(bucket_name)
    bucket.create()
    prefix = "toplevel/"

    names = ["x/key", "y.key1", "y.key2", "y.key3", "x/y/key", "x/y/z/key"]
    for name in names:
        s3.Object(bucket_name, prefix + name).put(Body=b"somedata")

    delimiter = ""
    keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
    keys.should.equal(
        [
            "toplevel/x/key",
            "toplevel/x/y/key",
            "toplevel/x/y/z/key",
            "toplevel/y.key1",
            "toplevel/y.key2",
            "toplevel/y.key3",
        ]
    )

    delimiter = "/"
    keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
    keys.should.equal(["toplevel/y.key1", "toplevel/y.key2", "toplevel/y.key3"])

    # Test delimiter with no prefix
    keys = [x.key for x in bucket.objects.filter(Delimiter=delimiter)]
    keys.should.equal([])

    prefix = "toplevel/x"
    keys = [x.key for x in bucket.objects.filter(Prefix=prefix)]
    keys.should.equal(["toplevel/x/key", "toplevel/x/y/key", "toplevel/x/y/z/key"])

    keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
    keys.should.equal([])


@mock_s3
def test_key_with_reduced_redundancy_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "test_bucket"
    bucket = s3.Bucket(bucket_name)
    bucket.create()

    bucket.put_object(
        Key="test_rr_key", Body=b"somedata", StorageClass="REDUCED_REDUNDANCY"
    )

    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
    [x.storage_class for x in bucket.objects.all()].should.equal(["REDUCED_REDUNDANCY"])


@mock_s3
def test_copy_key_reduced_redundancy_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("test_bucket")
    bucket.create()
    bucket.put_object(Key="the-key", Body=b"somedata")

    client.copy_object(
        Bucket="test_bucket",
        CopySource="test_bucket/the-key",
        Key="new-key",
        StorageClass="REDUCED_REDUNDANCY",
    )

    keys = dict([(k.key, k) for k in bucket.objects.all()])
    keys["new-key"].storage_class.should.equal("REDUCED_REDUNDANCY")
    keys["the-key"].storage_class.should.equal("STANDARD")


@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_restore_key_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
    key.restore.should.be.none
    key.restore_object(RestoreRequest={"Days": 1})
    if settings.TEST_SERVER_MODE:
        key.restore.should.contain('ongoing-request="false"')
    else:
        key.restore.should.equal(
            'ongoing-request="false", expiry-date="Mon, 02 Jan 2012 12:00:00 GMT"'
        )

    key.restore_object(RestoreRequest={"Days": 2})

    if settings.TEST_SERVER_MODE:
        key.restore.should.contain('ongoing-request="false"')
    else:
        key.restore.should.equal(
            'ongoing-request="false", expiry-date="Tue, 03 Jan 2012 12:00:00 GMT"'
        )


@mock_s3
def test_cannot_restore_standard_class_object_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    key = bucket.put_object(Key="the-key", Body=b"somedata")
    with pytest.raises(Exception) as err:
        key.restore_object(RestoreRequest={"Days": 1})

    err = err.value.response["Error"]
    err["Code"].should.equal("InvalidObjectState")
    err["StorageClass"].should.equal("STANDARD")
    err["Message"].should.equal(
        "The operation is not valid for the object's storage class"
    )


@mock_s3
def test_get_versioning_status_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    v = s3.BucketVersioning("foobar")
    v.status.should.be.none

    v.enable()
    v.status.should.equal("Enabled")

    v.suspend()
    v.status.should.equal("Suspended")


@mock_s3
def test_key_version_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()
    bucket.Versioning().enable()

    versions = []

    key = bucket.put_object(Key="the-key", Body=b"somedata")
    versions.append(key.version_id)
    key.put(Body=b"some string")
    versions.append(key.version_id)
    set(versions).should.have.length_of(2)

    key = client.get_object(Bucket="foobar", Key="the-key")
    key["VersionId"].should.equal(versions[-1])


@mock_s3
def test_list_versions_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()
    bucket.Versioning().enable()

    key_versions = []

    key = bucket.put_object(Key="the-key", Body=b"Version 1")
    key_versions.append(key.version_id)
    key = bucket.put_object(Key="the-key", Body=b"Version 2")
    key_versions.append(key.version_id)
    key_versions.should.have.length_of(2)

    versions = client.list_object_versions(Bucket="foobar")["Versions"]
    versions.should.have.length_of(2)

    versions[0]["Key"].should.equal("the-key")
    versions[0]["VersionId"].should.equal(key_versions[1])
    resp = client.get_object(Bucket="foobar", Key="the-key")
    resp["Body"].read().should.equal(b"Version 2")
    resp = client.get_object(
        Bucket="foobar", Key="the-key", VersionId=versions[0]["VersionId"]
    )
    resp["Body"].read().should.equal(b"Version 2")

    versions[1]["Key"].should.equal("the-key")
    versions[1]["VersionId"].should.equal(key_versions[0])
    resp = client.get_object(
        Bucket="foobar", Key="the-key", VersionId=versions[1]["VersionId"]
    )
    resp["Body"].read().should.equal(b"Version 1")

    bucket.put_object(Key="the2-key", Body=b"Version 1")

    list(bucket.objects.all()).should.have.length_of(2)
    versions = client.list_object_versions(Bucket="foobar", Prefix="the2")["Versions"]
    versions.should.have.length_of(1)


@mock_s3
def test_acl_setting_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    content = b"imafile"
    keyname = "test.txt"
    bucket.put_object(
        Key=keyname, Body=content, ContentType="text/plain", ACL="public-read"
    )

    grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
    grants.should.contain(
        {
            "Grantee": {
                "Type": "Group",
                "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
            },
            "Permission": "READ",
        }
    )


@mock_s3
def test_acl_setting_via_headers_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()

    keyname = "test.txt"

    bucket.put_object(Key=keyname, Body=b"imafile")
    client.put_object_acl(ACL="public-read", Bucket="foobar", Key=keyname)

    grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
    grants.should.contain(
        {
            "Grantee": {
                "Type": "Group",
                "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
            },
            "Permission": "READ",
        }
    )


@mock_s3
def test_acl_switching_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("foobar")
    bucket.create()
    keyname = "test.txt"

    bucket.put_object(Key=keyname, Body=b"asdf", ACL="public-read")
    client.put_object_acl(ACL="private", Bucket="foobar", Key=keyname)

    grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
    grants.shouldnt.contain(
        {
            "Grantee": {
                "Type": "Group",
                "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
            },
            "Permission": "READ",
        }
    )


@mock_s3
def test_acl_switching_nonexistent_key():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    with pytest.raises(ClientError) as e:
        s3.put_object_acl(Bucket="mybucket", Key="nonexistent", ACL="private")

    e.value.response["Error"]["Code"].should.equal("NoSuchKey")


@mock_s3
def test_streaming_upload_from_file_to_presigned_url():
    s3 = boto3.resource("s3", region_name="us-east-1")
    bucket = s3.Bucket("test-bucket")
    bucket.create()
    bucket.put_object(Body=b"ABCD", Key="file.txt")

    params = {"Bucket": "test-bucket", "Key": "file.txt"}
    presigned_url = boto3.client("s3").generate_presigned_url(
        "put_object", params, ExpiresIn=900
    )
    with open(__file__, "rb") as f:
        response = requests.put(presigned_url, data=f)
    assert response.status_code == 200


@mock_s3
def test_multipart_upload_from_file_to_presigned_url():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    params = {"Bucket": "mybucket", "Key": "file_upload"}
    presigned_url = boto3.client("s3").generate_presigned_url(
        "put_object", params, ExpiresIn=900
    )

    with open("text.txt", "w") as file:
        file.write("test")
    with open("text.txt", "rb") as upload:
        requests.put(presigned_url, files={"upload_file": upload})

    resp = s3.get_object(Bucket="mybucket", Key="file_upload")
    data = resp["Body"].read()
    assert data == b"test"
    # cleanup
    os.remove("text.txt")


@mock_s3
def test_put_chunked_with_v4_signature_in_body():
    bucket_name = "mybucket"
    file_name = "file"
    content = "CONTENT"
    content_bytes = bytes(content, encoding="utf8")
    # 'CONTENT' as received in moto, when PutObject is called in java AWS SDK v2
    chunked_body = b"7;chunk-signature=bd479c607ec05dd9d570893f74eed76a4b333dfa37ad6446f631ec47dc52e756\r\nCONTENT\r\n0;chunk-signature=d192ec4075ddfc18d2ef4da4f55a87dc762ba4417b3bd41e70c282f8bec2ece0\r\n\r\n"

    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)

    model = MyModel(file_name, content)
    model.save()
    boto_etag = s3.get_object(Bucket=bucket_name, Key=file_name)["ETag"]

    params = {"Bucket": bucket_name, "Key": file_name}
    # We'll use a manipulated presigned PUT to mimic a PUT from the SDK
    presigned_url = boto3.client("s3").generate_presigned_url(
        "put_object", params, ExpiresIn=900
    )
    requests.put(
        presigned_url,
        data=chunked_body,
        headers={
            "Content-Type": "application/octet-stream",
            "x-amz-content-sha256": "STREAMING-AWS4-HMAC-SHA256-PAYLOAD",
            "x-amz-decoded-content-length": str(len(content_bytes)),
        },
    )

    resp = s3.get_object(Bucket=bucket_name, Key=file_name)
    body = resp["Body"].read()
    assert body == content_bytes

    etag = resp["ETag"]
    assert etag == boto_etag
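

# A sketch of the aws-chunked framing hard-coded above: each chunk is
# "<hex size>;chunk-signature=<sig>\r\n<data>\r\n", terminated by a zero-length
# chunk. The signature below is a placeholder, not a real SigV4 chunk
# signature (computing one requires the signing key and the previous
# signature); the helper name is ours and pytest will not collect it:
def _example_aws_chunked_framing(data, fake_signature="0" * 64):
    frame = b"%x;chunk-signature=%s\r\n%s\r\n" % (
        len(data),
        fake_signature.encode(),
        data,
    )
    terminator = b"0;chunk-signature=%s\r\n\r\n" % fake_signature.encode()
    return frame + terminator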


@mock_s3
def test_default_key_buffer_size():
    # save original DEFAULT_KEY_BUFFER_SIZE environment variable content
    original_default_key_buffer_size = os.environ.get(
        "MOTO_S3_DEFAULT_KEY_BUFFER_SIZE", None
    )

    os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "2"  # 2 bytes
    assert get_s3_default_key_buffer_size() == 2
    fk = s3model.FakeKey("a", os.urandom(1))  # 1 byte string
    assert fk._value_buffer._rolled is False

    os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "1"  # 1 byte
    assert get_s3_default_key_buffer_size() == 1
    fk = s3model.FakeKey("a", os.urandom(3))  # 3 byte string
    assert fk._value_buffer._rolled is True

    # if no MOTO_S3_DEFAULT_KEY_BUFFER_SIZE env variable is present, the buffer
    # size should be less than S3_UPLOAD_PART_MIN_SIZE to prevent in-memory
    # caching of multipart uploads
    del os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"]
    assert get_s3_default_key_buffer_size() < S3_UPLOAD_PART_MIN_SIZE

    # restore original environment variable content
    if original_default_key_buffer_size:
        os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = original_default_key_buffer_size
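

# The _rolled flag checked above comes from Python's spooled temporary files:
# data stays in memory until it exceeds max_size, then rolls over to disk. A
# minimal standalone illustration using the standard library directly (this
# sketch assumes FakeKey's buffer behaves like SpooledTemporaryFile, which is
# what the assertions above rely on; pytest will not collect this helper):
def _example_spooled_rollover():
    import tempfile

    buf = tempfile.SpooledTemporaryFile(max_size=2)
    buf.write(b"a")  # 1 byte <= max_size: still in memory
    assert buf._rolled is False
    buf.write(b"bcd")  # total of 4 bytes > max_size: rolled over to disk
    assert buf._rolled is True
    buf.close()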


@mock_s3
def test_s3_object_in_private_bucket():
    s3 = boto3.resource("s3")
    bucket = s3.Bucket("test-bucket")
    bucket.create(
        ACL="private", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
    )
    bucket.put_object(ACL="private", Body=b"ABCD", Key="file.txt")

    s3_anonymous = boto3.resource("s3")
    s3_anonymous.meta.client.meta.events.register("choose-signer.s3.*", disable_signing)

    with pytest.raises(ClientError) as exc:
        s3_anonymous.Object(key="file.txt", bucket_name="test-bucket").get()
    exc.value.response["Error"]["Code"].should.equal("403")

    bucket.put_object(ACL="public-read", Body=b"ABCD", Key="file.txt")
    contents = (
        s3_anonymous.Object(key="file.txt", bucket_name="test-bucket")
        .get()["Body"]
        .read()
    )
    contents.should.equal(b"ABCD")


@mock_s3
def test_unicode_key_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("mybucket")
    bucket.create()

    key = bucket.put_object(Key="こんにちは.jpg", Body=b"Hello world!")

    [listed_key.key for listed_key in bucket.objects.all()].should.equal([key.key])
    fetched_key = s3.Object("mybucket", key.key)
    fetched_key.key.should.equal(key.key)
    fetched_key.get()["Body"].read().decode("utf-8").should.equal("Hello world!")


@mock_s3
def test_unicode_value_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("mybucket")
    bucket.create()

    bucket.put_object(Key="some_key", Body="こんにちは.jpg")

    key = s3.Object("mybucket", "some_key")
    key.get()["Body"].read().decode("utf-8").should.equal("こんにちは.jpg")


@mock_s3
def test_setting_content_encoding_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("mybucket")
    bucket.create()
    bucket.put_object(Body=b"abcdef", ContentEncoding="gzip", Key="keyname")

    key = s3.Object("mybucket", "keyname")
    key.content_encoding.should.equal("gzip")


@mock_s3
def test_bucket_location_default():
    cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    # No LocationConstraint ==> us-east-1
    cli.create_bucket(Bucket=bucket_name)
    cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(None)


@mock_s3
def test_bucket_location_nondefault():
    cli = boto3.client("s3", region_name="eu-central-1")
    bucket_name = "mybucket"
    # LocationConstraint set for non default regions
    cli.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
    )
    cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(
        "eu-central-1"
    )


# Test uses the current Region to determine whether to throw an error
# Region is retrieved based on current URL
# URL will always be localhost in Server Mode, so can't run it there
if not settings.TEST_SERVER_MODE:

    @mock_s3
    def test_s3_location_should_error_outside_useast1():
        s3 = boto3.client("s3", region_name="eu-west-1")

        bucket_name = "asdfasdfsdfdsfasda"

        with pytest.raises(ClientError) as e:
            s3.create_bucket(Bucket=bucket_name)
        e.value.response["Error"]["Message"].should.equal(
            "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to."
        )


@mock_s3
def test_ranged_get_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.Bucket("mybucket")
    bucket.create()
    rep = b"0123456789"
    key = bucket.put_object(Key="bigkey", Body=rep * 10)

    # Implicitly bounded range requests.
    key.get(Range="bytes=0-")["Body"].read().should.equal(rep * 10)
    key.get(Range="bytes=50-")["Body"].read().should.equal(rep * 5)
    key.get(Range="bytes=99-")["Body"].read().should.equal(b"9")

    # Explicitly bounded range requests starting from the first byte.
    key.get(Range="bytes=0-0")["Body"].read().should.equal(b"0")
    key.get(Range="bytes=0-49")["Body"].read().should.equal(rep * 5)
    key.get(Range="bytes=0-99")["Body"].read().should.equal(rep * 10)
    key.get(Range="bytes=0-100")["Body"].read().should.equal(rep * 10)
    key.get(Range="bytes=0-700")["Body"].read().should.equal(rep * 10)

    # Explicitly bounded range requests starting from a middle byte.
    key.get(Range="bytes=50-54")["Body"].read().should.equal(rep[:5])
    key.get(Range="bytes=50-99")["Body"].read().should.equal(rep * 5)
    key.get(Range="bytes=50-100")["Body"].read().should.equal(rep * 5)
    key.get(Range="bytes=50-700")["Body"].read().should.equal(rep * 5)

    # Explicitly bounded range requests starting from the last byte.
    key.get(Range="bytes=99-99")["Body"].read().should.equal(b"9")
    key.get(Range="bytes=99-100")["Body"].read().should.equal(b"9")
    key.get(Range="bytes=99-700")["Body"].read().should.equal(b"9")

    # Suffix range requests.
    key.get(Range="bytes=-1")["Body"].read().should.equal(b"9")
    key.get(Range="bytes=-60")["Body"].read().should.equal(rep * 6)
    key.get(Range="bytes=-100")["Body"].read().should.equal(rep * 10)
    key.get(Range="bytes=-101")["Body"].read().should.equal(rep * 10)
    key.get(Range="bytes=-700")["Body"].read().should.equal(rep * 10)

    key.content_length.should.equal(100)
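

# A hedged sketch (not part of the original suite) of how the Range values
# above map onto byte slices, per RFC 7233: "bytes=a-b" is an inclusive
# window clamped to the object size, and "bytes=-n" means the last n bytes.
def expected_range_slice(body, range_header):
    spec = range_header.split("=", 1)[1]
    start, _, end = spec.partition("-")
    if start == "":  # suffix range: the last `end` bytes
        return body[-min(int(end), len(body)):]
    if end == "":  # open-ended range: from `start` to the end of the object
        return body[int(start):]
    return body[int(start): int(end) + 1]  # inclusive upper bound, clamped by slicing


assert expected_range_slice(b"0123456789" * 10, "bytes=99-700") == b"9"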


@mock_s3
def test_policy_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    bucket = s3.Bucket(bucket_name)
    bucket.create()
    policy = json.dumps(
        {
            "Version": "2012-10-17",
            "Id": "PutObjPolicy",
            "Statement": [
                {
                    "Sid": "DenyUnEncryptedObjectUploads",
                    "Effect": "Deny",
                    "Principal": "*",
                    "Action": "s3:PutObject",
                    "Resource": "arn:aws:s3:::{bucket_name}/*".format(
                        bucket_name=bucket_name
                    ),
                    "Condition": {
                        "StringNotEquals": {
                            "s3:x-amz-server-side-encryption": "aws:kms"
                        }
                    },
                }
            ],
        }
    )

    with pytest.raises(ClientError) as ex:
        client.get_bucket_policy(Bucket=bucket_name)
    ex.value.response["Error"]["Code"].should.equal("NoSuchBucketPolicy")
    ex.value.response["Error"]["Message"].should.equal(
        "The bucket policy does not exist"
    )

    client.put_bucket_policy(Bucket=bucket_name, Policy=policy)

    client.get_bucket_policy(Bucket=bucket_name)["Policy"].should.equal(policy)

    client.delete_bucket_policy(Bucket=bucket_name)

    with pytest.raises(ClientError) as ex:
        client.get_bucket_policy(Bucket=bucket_name)
    ex.value.response["Error"]["Code"].should.equal("NoSuchBucketPolicy")


@mock_s3
def test_website_configuration_xml_boto3():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    bucket = s3.Bucket(bucket_name)
    bucket.create()

    client.put_bucket_website(
        Bucket=bucket_name,
        WebsiteConfiguration={
            "IndexDocument": {"Suffix": "index.html"},
            "RoutingRules": [
                {
                    "Condition": {"KeyPrefixEquals": "test/testing"},
                    "Redirect": {"ReplaceKeyWith": "test.txt"},
                }
            ],
        },
    )
    c = client.get_bucket_website(Bucket=bucket_name)
    c.should.have.key("IndexDocument").equals({"Suffix": "index.html"})
    c.should.have.key("RoutingRules")
    c["RoutingRules"].should.have.length_of(1)
    rule = c["RoutingRules"][0]
    rule.should.have.key("Condition").equals({"KeyPrefixEquals": "test/testing"})
    rule.should.have.key("Redirect").equals({"ReplaceKeyWith": "test.txt"})

    c.shouldnt.have.key("RedirectAllRequestsTo")
    c.shouldnt.have.key("ErrorDocument")


@mock_s3
def test_boto3_key_etag():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
    resp = s3.get_object(Bucket="mybucket", Key="steve")
    resp["ETag"].should.equal('"d32bda93738f7e03adb22e66c90fbc04"')


@mock_s3
def test_website_redirect_location():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
    resp = s3.get_object(Bucket="mybucket", Key="steve")
    resp.get("WebsiteRedirectLocation").should.be.none

    url = "https://github.com/spulec/moto"
    s3.put_object(
        Bucket="mybucket", Key="steve", Body=b"is awesome", WebsiteRedirectLocation=url
    )
    resp = s3.get_object(Bucket="mybucket", Key="steve")
    resp["WebsiteRedirectLocation"].should.equal(url)


@mock_s3
def test_delimiter_optional_in_response():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=1)
    assert resp.get("Delimiter") is None
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Delimiter="/")
    assert resp.get("Delimiter") == "/"


@mock_s3
def test_list_objects_with_pagesize_0():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=0)
    resp["Name"].should.equal("mybucket")
    resp["MaxKeys"].should.equal(0)
    resp["IsTruncated"].should.equal(False)
    resp.shouldnt.have.key("Contents")


@mock_s3
def test_boto3_list_objects_truncated_response():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
    s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
    s3.put_object(Bucket="mybucket", Key="three", Body=b"333")

    # First list
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=1)
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "one"
    assert resp["MaxKeys"] == 1
    assert resp["IsTruncated"] == True
    assert resp.get("Prefix") is None
    assert resp.get("Delimiter") is None
    assert "NextMarker" in resp

    next_marker = resp["NextMarker"]

    # Second list
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "three"
    assert resp["MaxKeys"] == 1
    assert resp["IsTruncated"] == True
    assert resp.get("Prefix") is None
    assert resp.get("Delimiter") is None
    assert "NextMarker" in resp

    next_marker = resp["NextMarker"]

    # Third list
    resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "two"
    assert resp["MaxKeys"] == 1
    assert resp["IsTruncated"] == False
    assert resp.get("Prefix") is None
    assert resp.get("Delimiter") is None
    assert "NextMarker" not in resp
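

# The Marker/NextMarker handshake walked through by hand above is exactly
# what boto3's built-in paginator automates; a hedged sketch (not part of
# the original suite), with an illustrative page size of 1:
def list_keys_with_paginator(client, bucket, page_size=1):
    paginator = client.get_paginator("list_objects")
    return [
        obj["Key"]
        for page in paginator.paginate(
            Bucket=bucket, PaginationConfig={"PageSize": page_size}
        )
        for obj in page.get("Contents", [])
    ]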


@mock_s3
def test_boto3_list_keys_xml_escaped():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    key_name = "Q&A.txt"
    s3.put_object(Bucket="mybucket", Key=key_name, Body=b"is awesome")

    resp = s3.list_objects_v2(Bucket="mybucket", Prefix=key_name)

    assert resp["Contents"][0]["Key"] == key_name
    assert resp["KeyCount"] == 1
    assert resp["MaxKeys"] == 1000
    assert resp["Prefix"] == key_name
    assert resp["IsTruncated"] == False
    assert "Delimiter" not in resp
    assert "StartAfter" not in resp
    assert "NextContinuationToken" not in resp
    assert "Owner" not in resp["Contents"][0]


@mock_s3
def test_boto3_list_objects_v2_common_prefix_pagination():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    max_keys = 1
    keys = ["test/{i}/{i}".format(i=i) for i in range(3)]
    for key in keys:
        s3.put_object(Bucket="mybucket", Key=key, Body=b"v")

    prefixes = []
    args = {
        "Bucket": "mybucket",
        "Delimiter": "/",
        "Prefix": "test/",
        "MaxKeys": max_keys,
    }
    resp = {"IsTruncated": True}
    while resp.get("IsTruncated", False):
        if "NextContinuationToken" in resp:
            args["ContinuationToken"] = resp["NextContinuationToken"]
        resp = s3.list_objects_v2(**args)
        if "CommonPrefixes" in resp:
            assert len(resp["CommonPrefixes"]) == max_keys
            prefixes.extend(i["Prefix"] for i in resp["CommonPrefixes"])

    assert prefixes == [k[: k.rindex("/") + 1] for k in keys]
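

# The manual ContinuationToken loop above maps onto the list_objects_v2
# paginator plus a JMESPath search; a hedged sketch (not part of the
# original suite):
def list_common_prefixes(client, bucket, prefix, page_size=1):
    paginator = client.get_paginator("list_objects_v2")
    pages = paginator.paginate(
        Bucket=bucket,
        Delimiter="/",
        Prefix=prefix,
        PaginationConfig={"PageSize": page_size},
    )
    # Pages without CommonPrefixes yield None from the search; drop those.
    return [p for p in pages.search("CommonPrefixes[].Prefix") if p is not None]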


@mock_s3
def test_boto3_list_objects_v2_common_invalid_continuation_token():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    max_keys = 1
    keys = ["test/{i}/{i}".format(i=i) for i in range(3)]
    for key in keys:
        s3.put_object(Bucket="mybucket", Key=key, Body=b"v")

    args = {
        "Bucket": "mybucket",
        "Delimiter": "/",
        "Prefix": "test/",
        "MaxKeys": max_keys,
        "ContinuationToken": "",
    }
    with pytest.raises(botocore.exceptions.ClientError) as exc:
        s3.list_objects_v2(**args)
    exc.value.response["Error"]["Code"].should.equal("InvalidArgument")
    exc.value.response["Error"]["Message"].should.equal(
        "The continuation token provided is incorrect"
    )


@mock_s3
def test_boto3_list_objects_v2_truncated_response():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
    s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
    s3.put_object(Bucket="mybucket", Key="three", Body=b"333")

    # First list
    resp = s3.list_objects_v2(Bucket="mybucket", MaxKeys=1)
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "one"
    assert resp["MaxKeys"] == 1
    assert resp["Prefix"] == ""
    assert resp["KeyCount"] == 1
    assert resp["IsTruncated"] == True
    assert "Delimiter" not in resp
    assert "StartAfter" not in resp
    assert "Owner" not in listed_object  # owner info was not requested

    next_token = resp["NextContinuationToken"]

    # Second list
    resp = s3.list_objects_v2(
        Bucket="mybucket", MaxKeys=1, ContinuationToken=next_token
    )
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "three"
    assert resp["MaxKeys"] == 1
    assert resp["Prefix"] == ""
    assert resp["KeyCount"] == 1
    assert resp["IsTruncated"] == True
    assert "Delimiter" not in resp
    assert "StartAfter" not in resp
    assert "Owner" not in listed_object

    next_token = resp["NextContinuationToken"]

    # Third list
    resp = s3.list_objects_v2(
        Bucket="mybucket", MaxKeys=1, ContinuationToken=next_token
    )
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "two"
    assert resp["MaxKeys"] == 1
    assert resp["Prefix"] == ""
    assert resp["KeyCount"] == 1
    assert resp["IsTruncated"] == False
    assert "Delimiter" not in resp
    assert "Owner" not in listed_object
    assert "StartAfter" not in resp
    assert "NextContinuationToken" not in resp


@mock_s3
def test_boto3_list_objects_v2_truncated_response_start_after():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
    s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
    s3.put_object(Bucket="mybucket", Key="three", Body=b"333")

    # First list
    resp = s3.list_objects_v2(Bucket="mybucket", MaxKeys=1, StartAfter="one")
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "three"
    assert resp["MaxKeys"] == 1
    assert resp["Prefix"] == ""
    assert resp["KeyCount"] == 1
    assert resp["IsTruncated"] == True
    assert resp["StartAfter"] == "one"
    assert "Delimiter" not in resp
    assert "Owner" not in listed_object

    next_token = resp["NextContinuationToken"]

    # Second list
    # The ContinuationToken must take precedence over StartAfter.
    resp = s3.list_objects_v2(
        Bucket="mybucket", MaxKeys=1, StartAfter="one", ContinuationToken=next_token
    )
    listed_object = resp["Contents"][0]

    assert listed_object["Key"] == "two"
    assert resp["MaxKeys"] == 1
    assert resp["Prefix"] == ""
    assert resp["KeyCount"] == 1
    assert resp["IsTruncated"] == False

    # When ContinuationToken is given, StartAfter is ignored. This also means
    # AWS does not return it in the response.
    assert "StartAfter" not in resp
    assert "Delimiter" not in resp
    assert "Owner" not in listed_object


@mock_s3
def test_boto3_list_objects_v2_fetch_owner():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="one", Body=b"11")

    resp = s3.list_objects_v2(Bucket="mybucket", FetchOwner=True)
    owner = resp["Contents"][0]["Owner"]

    assert "ID" in owner
    assert "DisplayName" in owner
    assert len(owner.keys()) == 2


@mock_s3
def test_boto3_list_objects_v2_truncate_combined_keys_and_folders():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")
    s3.put_object(Bucket="mybucket", Key="1/2", Body="")
    s3.put_object(Bucket="mybucket", Key="2", Body="")
    s3.put_object(Bucket="mybucket", Key="3/4", Body="")
    s3.put_object(Bucket="mybucket", Key="4", Body="")

    resp = s3.list_objects_v2(Bucket="mybucket", Prefix="", MaxKeys=2, Delimiter="/")
    assert "Delimiter" in resp
    assert resp["IsTruncated"] is True
    assert resp["KeyCount"] == 2
    assert len(resp["Contents"]) == 1
    assert resp["Contents"][0]["Key"] == "2"
    assert len(resp["CommonPrefixes"]) == 1
    assert resp["CommonPrefixes"][0]["Prefix"] == "1/"

    last_tail = resp["NextContinuationToken"]
    resp = s3.list_objects_v2(
        Bucket="mybucket", MaxKeys=2, Prefix="", Delimiter="/", StartAfter=last_tail
    )
    assert resp["KeyCount"] == 2
    assert resp["IsTruncated"] is False
    assert len(resp["Contents"]) == 1
    assert resp["Contents"][0]["Key"] == "4"
    assert len(resp["CommonPrefixes"]) == 1
    assert resp["CommonPrefixes"][0]["Prefix"] == "3/"


@mock_s3
def test_boto3_bucket_create():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="blah")

    s3.Object("blah", "hello.txt").put(Body="some text")

    s3.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8").should.equal(
        "some text"
    )


@mock_s3
def test_bucket_create_force_us_east_1():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    with pytest.raises(ClientError) as exc:
        s3.create_bucket(
            Bucket="blah",
            CreateBucketConfiguration={"LocationConstraint": DEFAULT_REGION_NAME},
        )
    exc.value.response["Error"]["Code"].should.equal("InvalidLocationConstraint")


@mock_s3
def test_boto3_bucket_create_eu_central():
    s3 = boto3.resource("s3", region_name="eu-central-1")
    s3.create_bucket(
        Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-central-1"}
    )

    s3.Object("blah", "hello.txt").put(Body="some text")

    s3.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8").should.equal(
        "some text"
    )


@mock_s3
def test_bucket_create_empty_bucket_configuration_should_return_malformed_xml_error():
    s3 = boto3.resource("s3", region_name="us-east-1")
    with pytest.raises(ClientError) as e:
        s3.create_bucket(Bucket="whatever", CreateBucketConfiguration={})
    e.value.response["Error"]["Code"].should.equal("MalformedXML")
    e.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)


@mock_s3
def test_boto3_head_object():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="blah")

    s3.Object("blah", "hello.txt").put(Body="some text")

    s3.Object("blah", "hello.txt").meta.client.head_object(
        Bucket="blah", Key="hello.txt"
    )

    with pytest.raises(ClientError) as e:
        s3.Object("blah", "hello2.txt").meta.client.head_object(
            Bucket="blah", Key="hello_bad.txt"
        )
    e.value.response["Error"]["Code"].should.equal("404")


@mock_s3
def test_boto3_bucket_deletion():
    cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    cli.create_bucket(Bucket="foobar")

    cli.put_object(Bucket="foobar", Key="the-key", Body="some value")

    # Try to delete a bucket that still has keys
    cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(
        cli.exceptions.ClientError,
        (
            "An error occurred (BucketNotEmpty) when calling the DeleteBucket operation: "
            "The bucket you tried to delete is not empty"
        ),
    )

    cli.delete_object(Bucket="foobar", Key="the-key")
    cli.delete_bucket(Bucket="foobar")

    # Get non-existing bucket
    cli.head_bucket.when.called_with(Bucket="foobar").should.throw(
        cli.exceptions.ClientError,
        "An error occurred (404) when calling the HeadBucket operation: Not Found",
    )

    # Delete non-existing bucket
    cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(
        cli.exceptions.NoSuchBucket
    )


@mock_s3
def test_boto3_get_object():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="blah")

    s3.Object("blah", "hello.txt").put(Body="some text")

    s3.Object("blah", "hello.txt").meta.client.head_object(
        Bucket="blah", Key="hello.txt"
    )

    with pytest.raises(ClientError) as e:
        s3.Object("blah", "hello2.txt").get()

    e.value.response["Error"]["Code"].should.equal("NoSuchKey")


@mock_s3
def test_boto3_s3_content_type():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    my_bucket = s3.Bucket("my-cool-bucket")
    my_bucket.create()
    s3_path = "test_s3.py"
    s3 = boto3.resource("s3", verify=False)

    content_type = "text/python-x"
    s3.Object(my_bucket.name, s3_path).put(
        ContentType=content_type, Body=b"some python code"
    )

    s3.Object(my_bucket.name, s3_path).content_type.should.equal(content_type)


@mock_s3
def test_boto3_get_missing_object_with_part_number():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="blah")

    with pytest.raises(ClientError) as e:
        s3.Object("blah", "hello.txt").meta.client.head_object(
            Bucket="blah", Key="hello.txt", PartNumber=123
        )
    e.value.response["Error"]["Code"].should.equal("404")


@mock_s3
def test_boto3_head_object_with_versioning():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket = s3.create_bucket(Bucket="blah")
    bucket.Versioning().enable()

    old_content = "some text"
    new_content = "some new text"
    s3.Object("blah", "hello.txt").put(Body=old_content)
    s3.Object("blah", "hello.txt").put(Body=new_content)

    versions = list(s3.Bucket("blah").object_versions.all())
    latest = list(filter(lambda item: item.is_latest, versions))[0]
    oldest = list(filter(lambda item: not item.is_latest, versions))[0]

    head_object = s3.Object("blah", "hello.txt").meta.client.head_object(
        Bucket="blah", Key="hello.txt"
    )
    head_object["VersionId"].should.equal(latest.id)
    head_object["ContentLength"].should.equal(len(new_content))

    old_head_object = s3.Object("blah", "hello.txt").meta.client.head_object(
        Bucket="blah", Key="hello.txt", VersionId=oldest.id
    )
    old_head_object["VersionId"].should.equal(oldest.id)
    old_head_object["ContentLength"].should.equal(len(old_content))

    old_head_object["VersionId"].should_not.equal(head_object["VersionId"])


@mock_s3
def test_boto3_copy_non_existing_file():
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    src = "srcbucket"
    target = "target"
    s3.create_bucket(Bucket=src)
    s3.create_bucket(Bucket=target)
    s3_client = boto3.client("s3")
    with pytest.raises(ClientError) as exc:
        s3_client.copy_object(
            Bucket=target, CopySource={"Bucket": src, "Key": "foofoofoo"}, Key="newkey"
        )
    err = exc.value.response["Error"]
    err["Code"].should.equal("NoSuchKey")
    err["Message"].should.equal("The specified key does not exist.")
    err["Key"].should.equal("foofoofoo")


@mock_s3
def test_boto3_copy_object_with_versioning():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

    client.create_bucket(
        Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
    )
    client.put_bucket_versioning(
        Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
    )

    client.put_object(Bucket="blah", Key="test1", Body=b"test1")
    client.put_object(Bucket="blah", Key="test2", Body=b"test2")

    client.get_object(Bucket="blah", Key="test1")["VersionId"]
    obj2_version = client.get_object(Bucket="blah", Key="test2")["VersionId"]

    client.copy_object(
        CopySource={"Bucket": "blah", "Key": "test1"}, Bucket="blah", Key="test2"
    )
    obj2_version_new = client.get_object(Bucket="blah", Key="test2")["VersionId"]

    # Version should be different to previous version
    obj2_version_new.should_not.equal(obj2_version)

    client.copy_object(
        CopySource={"Bucket": "blah", "Key": "test2", "VersionId": obj2_version},
        Bucket="blah",
        Key="test3",
    )
    obj3_version_new = client.get_object(Bucket="blah", Key="test3")["VersionId"]
    obj3_version_new.should_not.equal(obj2_version_new)

    # Copy file that doesn't exist
    with pytest.raises(ClientError) as e:
        client.copy_object(
            CopySource={"Bucket": "blah", "Key": "test4", "VersionId": obj2_version},
            Bucket="blah",
            Key="test5",
        )
    e.value.response["Error"]["Code"].should.equal("NoSuchKey")

    response = client.create_multipart_upload(Bucket="blah", Key="test4")
    upload_id = response["UploadId"]
    response = client.upload_part_copy(
        Bucket="blah",
        Key="test4",
        CopySource={"Bucket": "blah", "Key": "test3", "VersionId": obj3_version_new},
        UploadId=upload_id,
        PartNumber=1,
    )
    etag = response["CopyPartResult"]["ETag"]
    client.complete_multipart_upload(
        Bucket="blah",
        Key="test4",
        UploadId=upload_id,
        MultipartUpload={"Parts": [{"ETag": etag, "PartNumber": 1}]},
    )

    response = client.get_object(Bucket="blah", Key="test4")
    data = response["Body"].read()
    data.should.equal(b"test2")


@mock_s3
def test_s3_abort_multipart_data_with_invalid_upload_and_key():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="blah")

    with pytest.raises(Exception) as err:
        client.abort_multipart_upload(
            Bucket="blah", Key="foobar", UploadId="dummy_upload_id"
        )
    err = err.value.response["Error"]
    err["Code"].should.equal("NoSuchUpload")
    err["Message"].should.equal(
        "The specified upload does not exist. The upload ID may be invalid, or the upload may have been aborted or completed."
    )
    err["UploadId"].should.equal("dummy_upload_id")


@mock_s3
def test_boto3_copy_object_from_unversioned_to_versioned_bucket():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

    client.create_bucket(
        Bucket="src", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
    )
    client.create_bucket(
        Bucket="dest", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
    )
    client.put_bucket_versioning(
        Bucket="dest", VersioningConfiguration={"Status": "Enabled"}
    )

    client.put_object(Bucket="src", Key="test", Body=b"content")

    obj2_version_new = client.copy_object(
        CopySource={"Bucket": "src", "Key": "test"}, Bucket="dest", Key="test"
    ).get("VersionId")

    # VersionId should be present in the response
    obj2_version_new.should_not.equal(None)


@mock_s3
def test_boto3_copy_object_with_replacement_tagging():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="mybucket")
    client.put_object(
        Bucket="mybucket", Key="original", Body=b"test", Tagging="tag=old"
    )

    # using system tags will fail
    with pytest.raises(ClientError) as err:
        client.copy_object(
            CopySource={"Bucket": "mybucket", "Key": "original"},
            Bucket="mybucket",
            Key="copy1",
            TaggingDirective="REPLACE",
            Tagging="aws:tag=invalid_key",
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("InvalidTag")

    client.copy_object(
        CopySource={"Bucket": "mybucket", "Key": "original"},
        Bucket="mybucket",
        Key="copy1",
        TaggingDirective="REPLACE",
        Tagging="tag=new",
    )
    client.copy_object(
        CopySource={"Bucket": "mybucket", "Key": "original"},
        Bucket="mybucket",
        Key="copy2",
        TaggingDirective="COPY",
    )

    tags1 = client.get_object_tagging(Bucket="mybucket", Key="copy1")["TagSet"]
    tags1.should.equal([{"Key": "tag", "Value": "new"}])
    tags2 = client.get_object_tagging(Bucket="mybucket", Key="copy2")["TagSet"]
    tags2.should.equal([{"Key": "tag", "Value": "old"}])


@mock_s3
def test_boto3_deleted_versionings_list():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

    client.create_bucket(Bucket="blah")
    client.put_bucket_versioning(
        Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
    )

    client.put_object(Bucket="blah", Key="test1", Body=b"test1")
    client.put_object(Bucket="blah", Key="test2", Body=b"test2")
    client.delete_objects(Bucket="blah", Delete={"Objects": [{"Key": "test1"}]})

    listed = client.list_objects_v2(Bucket="blah")
    assert len(listed["Contents"]) == 1


@mock_s3
def test_boto3_delete_objects_for_specific_version_id():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="blah")
    client.put_bucket_versioning(
        Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
    )

    client.put_object(Bucket="blah", Key="test1", Body=b"test1a")
    client.put_object(Bucket="blah", Key="test1", Body=b"test1b")

    response = client.list_object_versions(Bucket="blah", Prefix="test1")
    id_to_delete = [v["VersionId"] for v in response["Versions"] if v["IsLatest"]][0]

    response = client.delete_objects(
        Bucket="blah", Delete={"Objects": [{"Key": "test1", "VersionId": id_to_delete}]}
    )
    assert response["Deleted"] == [{"Key": "test1", "VersionId": id_to_delete}]

    listed = client.list_objects_v2(Bucket="blah")
    assert len(listed["Contents"]) == 1


@mock_s3
def test_boto3_delete_versioned_bucket():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)

    client.create_bucket(Bucket="blah")
    client.put_bucket_versioning(
        Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
    )

    resp = client.put_object(Bucket="blah", Key="test1", Body=b"test1")
    client.delete_object(Bucket="blah", Key="test1", VersionId=resp["VersionId"])

    client.delete_bucket(Bucket="blah")


@mock_s3
def test_boto3_delete_versioned_bucket_returns_meta():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="blah")
    client.put_bucket_versioning(
        Bucket="blah", VersioningConfiguration={"Status": "Enabled"}
    )

    client.put_object(Bucket="blah", Key="test1", Body=b"test1")

    # Delete the object
    del_resp = client.delete_object(Bucket="blah", Key="test1")
    assert "DeleteMarker" not in del_resp
    assert del_resp["VersionId"] is not None

    # Delete the delete marker
    del_resp2 = client.delete_object(
        Bucket="blah", Key="test1", VersionId=del_resp["VersionId"]
    )
    assert del_resp2["DeleteMarker"] == True
    assert "VersionId" not in del_resp2


@mock_s3
def test_boto3_get_object_if_modified_since():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"

    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(
            Bucket=bucket_name,
            Key=key,
            IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})


@mock_s3
def test_boto3_get_object_if_unmodified_since():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(
            Bucket=bucket_name,
            Key=key,
            IfUnmodifiedSince=datetime.datetime.utcnow() - datetime.timedelta(hours=1),
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("PreconditionFailed")
    e.response["Error"]["Condition"].should.equal("If-Unmodified-Since")


@mock_s3
def test_boto3_get_object_if_match():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
    e = err.value
    e.response["Error"]["Code"].should.equal("PreconditionFailed")
    e.response["Error"]["Condition"].should.equal("If-Match")


@mock_s3
def test_boto3_get_object_if_none_match():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    etag = s3.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.get_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})


@mock_s3
def test_boto3_head_object_if_modified_since():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"

    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(
            Bucket=bucket_name,
            Key=key,
            IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})


@mock_s3
def test_boto3_head_object_if_unmodified_since():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(
            Bucket=bucket_name,
            Key=key,
            IfUnmodifiedSince=datetime.datetime.utcnow() - datetime.timedelta(hours=1),
        )
    e = err.value
    e.response["Error"].should.equal({"Code": "412", "Message": "Precondition Failed"})


@mock_s3
def test_boto3_head_object_if_match():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
    e = err.value
    e.response["Error"].should.equal({"Code": "412", "Message": "Precondition Failed"})


@mock_s3
def test_boto3_head_object_if_none_match():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "blah"
    s3.create_bucket(Bucket=bucket_name)

    key = "hello.txt"
    etag = s3.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]

    with pytest.raises(botocore.exceptions.ClientError) as err:
        s3.head_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
    e = err.value
    e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
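

# A hedged recap of the eight conditional-request tests above (not part of
# the original suite): the precondition headers and the failure each one
# triggers when the stored object does not satisfy it. Note the mocked
# get_object reports the 412 code as "PreconditionFailed", while
# head_object reports it as "412".
CONDITIONAL_REQUEST_FAILURES = {
    "IfModifiedSince": "304 Not Modified",  # object unchanged since the given date
    "IfUnmodifiedSince": "412 Precondition Failed",  # object changed since the date
    "IfMatch": "412 Precondition Failed",  # stored ETag differs from the one given
    "IfNoneMatch": "304 Not Modified",  # stored ETag still matches the one given
}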


@mock_s3
@reduced_min_part_size
def test_boto3_multipart_etag():
    # Create Bucket so that test can run
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"]
    part1 = b"0" * REDUCED_PART_SIZE
    etags = []
    etags.append(
        s3.upload_part(
            Bucket="mybucket",
            Key="the-key",
            PartNumber=1,
            UploadId=upload_id,
            Body=part1,
        )["ETag"]
    )
    # last part, can be less than 5 MB
    part2 = b"1"
    etags.append(
        s3.upload_part(
            Bucket="mybucket",
            Key="the-key",
            PartNumber=2,
            UploadId=upload_id,
            Body=part2,
        )["ETag"]
    )

    s3.complete_multipart_upload(
        Bucket="mybucket",
        Key="the-key",
        UploadId=upload_id,
        MultipartUpload={
            "Parts": [
                {"ETag": etag, "PartNumber": i} for i, etag in enumerate(etags, 1)
            ]
        },
    )
    # we should get both parts as the key contents
    resp = s3.get_object(Bucket="mybucket", Key="the-key")
    resp["ETag"].should.equal(EXPECTED_ETAG)
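

# How the "<hash>-2" style multipart ETag asserted above is derived, as a
# hedged sketch (not part of the original suite): concatenate the binary
# MD5 digest of every part, MD5 that blob, and append "-<part count>".
import hashlib


def multipart_etag(parts):
    digests = b"".join(hashlib.md5(part).digest() for part in parts)
    return '"{}-{}"'.format(hashlib.md5(digests).hexdigest(), len(parts))


# Under this scheme, multipart_etag([b"0" * REDUCED_PART_SIZE, b"1"]) should
# reproduce EXPECTED_ETAG for the matching REDUCED_PART_SIZE.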


@mock_s3
@reduced_min_part_size
def test_boto3_multipart_version():
    # Create Bucket so that test can run
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    s3.put_bucket_versioning(
        Bucket="mybucket", VersioningConfiguration={"Status": "Enabled"}
    )

    upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"]
    part1 = b"0" * REDUCED_PART_SIZE
    etags = []
    etags.append(
        s3.upload_part(
            Bucket="mybucket",
            Key="the-key",
            PartNumber=1,
            UploadId=upload_id,
            Body=part1,
        )["ETag"]
    )
    # last part, can be less than 5 MB
    part2 = b"1"
    etags.append(
        s3.upload_part(
            Bucket="mybucket",
            Key="the-key",
            PartNumber=2,
            UploadId=upload_id,
            Body=part2,
        )["ETag"]
    )
    response = s3.complete_multipart_upload(
        Bucket="mybucket",
        Key="the-key",
        UploadId=upload_id,
        MultipartUpload={
            "Parts": [
                {"ETag": etag, "PartNumber": i} for i, etag in enumerate(etags, 1)
            ]
        },
    )

    response["VersionId"].should_not.be.none


@mock_s3
@pytest.mark.parametrize(
    "part_nr,msg,msg2",
    [
        (
            -42,
            "Argument max-parts must be an integer between 0 and 2147483647",
            "Argument part-number-marker must be an integer between 0 and 2147483647",
        ),
        (
            2147483647 + 42,
            "Provided max-parts not an integer or within integer range",
            "Provided part-number-marker not an integer or within integer range",
        ),
    ],
)
def test_boto3_multipart_list_parts_invalid_argument(part_nr, msg, msg2):
    s3 = boto3.client("s3", region_name="us-east-1")
    bucket_name = "mybucketasdfljoqwerasdfas"
    s3.create_bucket(Bucket=bucket_name)

    mpu = s3.create_multipart_upload(Bucket=bucket_name, Key="the-key")
    mpu_id = mpu["UploadId"]

    def get_parts(**kwarg):
        s3.list_parts(Bucket=bucket_name, Key="the-key", UploadId=mpu_id, **kwarg)

    with pytest.raises(ClientError) as err:
        get_parts(**{"MaxParts": part_nr})
    e = err.value.response["Error"]
    e["Code"].should.equal("InvalidArgument")
    e["Message"].should.equal(msg)

    with pytest.raises(ClientError) as err:
        get_parts(**{"PartNumberMarker": part_nr})
    e = err.value.response["Error"]
    e["Code"].should.equal("InvalidArgument")
    e["Message"].should.equal(msg2)


@mock_s3
@reduced_min_part_size
def test_boto3_multipart_list_parts():
    s3 = boto3.client("s3", region_name="us-east-1")
    bucket_name = "mybucketasdfljoqwerasdfas"
    s3.create_bucket(Bucket=bucket_name)

    mpu = s3.create_multipart_upload(Bucket=bucket_name, Key="the-key")
    mpu_id = mpu["UploadId"]

    parts = []
    n_parts = 10

    def get_parts_all(i):
        # Get uploaded parts using default values
        uploaded_parts = []

        uploaded = s3.list_parts(Bucket=bucket_name, Key="the-key", UploadId=mpu_id)

        assert uploaded["PartNumberMarker"] == 0

        # Parts content check
        if i > 0:
            for part in uploaded["Parts"]:
                uploaded_parts.append(
                    {"ETag": part["ETag"], "PartNumber": part["PartNumber"]}
                )
            assert uploaded_parts == parts

            next_part_number_marker = uploaded["Parts"][-1]["PartNumber"]
        else:
            next_part_number_marker = 0

        assert uploaded["NextPartNumberMarker"] == next_part_number_marker

        assert not uploaded["IsTruncated"]

    def get_parts_by_batch(i):
        # Get uploaded parts in batches of 2
        part_number_marker = 0
        uploaded_parts = []

        while "there are parts":
            uploaded = s3.list_parts(
                Bucket=bucket_name,
                Key="the-key",
                UploadId=mpu_id,
                PartNumberMarker=part_number_marker,
                MaxParts=2,
            )

            assert uploaded["PartNumberMarker"] == part_number_marker

            if i > 0:
                # We should receive at most 2 parts
                assert len(uploaded["Parts"]) <= 2

                # Store parts content for the final check
                for part in uploaded["Parts"]:
                    uploaded_parts.append(
                        {"ETag": part["ETag"], "PartNumber": part["PartNumber"]}
                    )

            # No more parts, get out of the loop
            if not uploaded["IsTruncated"]:
                break

            # Next parts batch will start with that number
            part_number_marker = uploaded["NextPartNumberMarker"]
            assert part_number_marker == i + 1 if len(parts) > i else i

        # Final check: we received all uploaded parts
        assert uploaded_parts == parts

    # Check ListParts API parameters when no part was uploaded
    get_parts_all(0)
    get_parts_by_batch(0)

    for i in range(1, n_parts + 1):
        part_size = REDUCED_PART_SIZE + i
        body = b"1" * part_size
        part = s3.upload_part(
            Bucket=bucket_name,
            Key="the-key",
            PartNumber=i,
            UploadId=mpu_id,
            Body=body,
            ContentLength=len(body),
        )
        parts.append({"PartNumber": i, "ETag": part["ETag"]})

        # Check ListParts API parameters while there are uploaded parts
        get_parts_all(i)
        get_parts_by_batch(i)

    # Check ListParts API parameters when all parts were uploaded
    get_parts_all(11)
    get_parts_by_batch(11)

    s3.complete_multipart_upload(
        Bucket=bucket_name,
        Key="the-key",
        UploadId=mpu_id,
        MultipartUpload={"Parts": parts},
    )
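

# boto3 also ships a "list_parts" paginator that drives the
# PartNumberMarker/NextPartNumberMarker handshake the helpers above walk
# through by hand. A hedged sketch (not part of the original suite; it only
# works while the multipart upload is still in progress):
def iter_parts(client, bucket, key, upload_id):
    paginator = client.get_paginator("list_parts")
    for page in paginator.paginate(Bucket=bucket, Key=key, UploadId=upload_id):
        yield from page.get("Parts", [])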


@mock_s3
@reduced_min_part_size
def test_boto3_multipart_part_size():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="mybucket")

    mpu = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")
    mpu_id = mpu["UploadId"]

    parts = []
    n_parts = 10
    for i in range(1, n_parts + 1):
        part_size = REDUCED_PART_SIZE + i
        body = b"1" * part_size
        part = s3.upload_part(
            Bucket="mybucket",
            Key="the-key",
            PartNumber=i,
            UploadId=mpu_id,
            Body=body,
            ContentLength=len(body),
        )
        parts.append({"PartNumber": i, "ETag": part["ETag"]})

    s3.complete_multipart_upload(
        Bucket="mybucket",
        Key="the-key",
        UploadId=mpu_id,
        MultipartUpload={"Parts": parts},
    )

    for i in range(1, n_parts + 1):
        obj = s3.head_object(Bucket="mybucket", Key="the-key", PartNumber=i)
        assert obj["ContentLength"] == REDUCED_PART_SIZE + i


@mock_s3
def test_boto3_put_object_with_tagging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)

    # using system tags will fail
    with pytest.raises(ClientError) as err:
        s3.put_object(Bucket=bucket_name, Key=key, Body="test", Tagging="aws:foo=bar")
    e = err.value
    e.response["Error"]["Code"].should.equal("InvalidTag")

    s3.put_object(Bucket=bucket_name, Key=key, Body="test", Tagging="foo=bar")

    s3.get_object_tagging(Bucket=bucket_name, Key=key)["TagSet"].should.contain(
        {"Key": "foo", "Value": "bar"}
    )

    resp = s3.get_object(Bucket=bucket_name, Key=key)
    resp.should.have.key("TagCount").equals(1)

    s3.delete_object_tagging(Bucket=bucket_name, Key=key)

    s3.get_object_tagging(Bucket=bucket_name, Key=key)["TagSet"].should.equal([])


@mock_s3
def test_boto3_put_bucket_tagging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    s3.create_bucket(Bucket=bucket_name)

    # With 1 tag:
    resp = s3.put_bucket_tagging(
        Bucket=bucket_name, Tagging={"TagSet": [{"Key": "TagOne", "Value": "ValueOne"}]}
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    # With multiple tags:
    resp = s3.put_bucket_tagging(
        Bucket=bucket_name,
        Tagging={
            "TagSet": [
                {"Key": "TagOne", "Value": "ValueOne"},
                {"Key": "TagTwo", "Value": "ValueTwo"},
            ]
        },
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    # No tags is also OK:
    resp = s3.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": []})
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    # With duplicate tag keys:
    with pytest.raises(ClientError) as err:
        resp = s3.put_bucket_tagging(
            Bucket=bucket_name,
            Tagging={
                "TagSet": [
                    {"Key": "TagOne", "Value": "ValueOne"},
                    {"Key": "TagOne", "Value": "ValueOneAgain"},
                ]
            },
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("InvalidTag")
    e.response["Error"]["Message"].should.equal(
        "Cannot provide multiple Tags with the same key"
    )

    # Cannot put tags that are "system" tags - i.e. tags that start with "aws:"
    with pytest.raises(ClientError) as ce:
        s3.put_bucket_tagging(
            Bucket=bucket_name,
            Tagging={"TagSet": [{"Key": "aws:sometag", "Value": "nope"}]},
        )
    e = ce.value
    e.response["Error"]["Code"].should.equal("InvalidTag")
    e.response["Error"]["Message"].should.equal(
        "System tags cannot be added/updated by requester"
    )

    # This is OK though:
    s3.put_bucket_tagging(
        Bucket=bucket_name,
        Tagging={"TagSet": [{"Key": "something:aws:stuff", "Value": "this is fine"}]},
    )
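

# The "aws:" rule exercised above applies only to the *prefix* of the tag
# key; a hedged sketch (not part of the original suite) of the check these
# assertions imply:
def is_system_tag(key):
    return key.startswith("aws:")


assert is_system_tag("aws:sometag")
assert not is_system_tag("something:aws:stuff")  # "aws:" elsewhere is fine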
@mock_s3
def test_boto3_get_bucket_tagging ( ) :
2020-02-02 10:36:51 +00:00
s3 = boto3 . client ( " s3 " , region_name = DEFAULT_REGION_NAME )
2017-09-07 18:30:05 +00:00
bucket_name = " mybucket "
s3 . create_bucket ( Bucket = bucket_name )
2019-10-31 15:44:26 +00:00
s3 . put_bucket_tagging (
Bucket = bucket_name ,
Tagging = {
" TagSet " : [
{ " Key " : " TagOne " , " Value " : " ValueOne " } ,
{ " Key " : " TagTwo " , " Value " : " ValueTwo " } ,
]
} ,
)
2017-09-07 18:30:05 +00:00
# Get the tags for the bucket:
resp = s3 . get_bucket_tagging ( Bucket = bucket_name )
2019-10-31 15:44:26 +00:00
resp [ " ResponseMetadata " ] [ " HTTPStatusCode " ] . should . equal ( 200 )
2017-09-07 18:30:05 +00:00
len ( resp [ " TagSet " ] ) . should . equal ( 2 )
# With no tags:
2019-10-31 15:44:26 +00:00
s3 . put_bucket_tagging ( Bucket = bucket_name , Tagging = { " TagSet " : [ ] } )
2017-09-07 18:30:05 +00:00
2020-10-06 05:54:49 +00:00
with pytest . raises ( ClientError ) as err :
2017-09-07 18:30:05 +00:00
s3 . get_bucket_tagging ( Bucket = bucket_name )
2020-10-06 06:04:09 +00:00
e = err . value
2017-09-07 18:30:05 +00:00
e . response [ " Error " ] [ " Code " ] . should . equal ( " NoSuchTagSet " )
e . response [ " Error " ] [ " Message " ] . should . equal ( " The TagSet does not exist " )
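

# Hedged sketch: a caller of get_bucket_tagging often wants the tags as a
# plain dict, with the NoSuchTagSet error (asserted above) mapped to "no
# tags". The helper name is an assumption, not part of moto or boto3.
def _get_bucket_tags_as_dict_sketch(client, bucket_name):
    try:
        tag_set = client.get_bucket_tagging(Bucket=bucket_name)["TagSet"]
    except ClientError as exc:
        if exc.response["Error"]["Code"] == "NoSuchTagSet":
            return {}
        raise
    return {tag["Key"]: tag["Value"] for tag in tag_set}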


@mock_s3
def test_boto3_delete_bucket_tagging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_tagging(
        Bucket=bucket_name,
        Tagging={
            "TagSet": [
                {"Key": "TagOne", "Value": "ValueOne"},
                {"Key": "TagTwo", "Value": "ValueTwo"},
            ]
        },
    )

    resp = s3.delete_bucket_tagging(Bucket=bucket_name)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)

    with pytest.raises(ClientError) as err:
        s3.get_bucket_tagging(Bucket=bucket_name)

    e = err.value
    e.response["Error"]["Code"].should.equal("NoSuchTagSet")
    e.response["Error"]["Message"].should.equal("The TagSet does not exist")


@mock_s3
def test_boto3_put_bucket_cors():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    s3.create_bucket(Bucket=bucket_name)
    resp = s3.put_bucket_cors(
        Bucket=bucket_name,
        CORSConfiguration={
            "CORSRules": [
                {
                    "AllowedOrigins": ["*"],
                    "AllowedMethods": ["GET", "POST"],
                    "AllowedHeaders": ["Authorization"],
                    "ExposeHeaders": ["x-amz-request-id"],
                    "MaxAgeSeconds": 123,
                },
                {
                    "AllowedOrigins": ["*"],
                    "AllowedMethods": ["PUT"],
                    "AllowedHeaders": ["Authorization"],
                    "ExposeHeaders": ["x-amz-request-id"],
                    "MaxAgeSeconds": 123,
                },
            ]
        },
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    with pytest.raises(ClientError) as err:
        s3.put_bucket_cors(
            Bucket=bucket_name,
            CORSConfiguration={
                "CORSRules": [
                    {"AllowedOrigins": ["*"], "AllowedMethods": ["NOTREAL", "POST"]}
                ]
            },
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("InvalidRequest")
    e.response["Error"]["Message"].should.equal(
        "Found unsupported HTTP method in CORS config. "
        "Unsupported method is NOTREAL"
    )

    # An empty rule set is rejected:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={"CORSRules": []})
    e = err.value
    e.response["Error"]["Code"].should.equal("MalformedXML")

    # And so are 101 rules:
    many_rules = [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}] * 101
    with pytest.raises(ClientError) as err:
        s3.put_bucket_cors(
            Bucket=bucket_name, CORSConfiguration={"CORSRules": many_rules}
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("MalformedXML")
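

# Hedged sketch mirroring the failures exercised above: an unknown method is
# rejected, and empty or oversized rule lists are malformed. The allowed
# method set and helper name are assumptions for illustration, not moto
# internals.
_CORS_METHODS_SKETCH = {"GET", "PUT", "POST", "DELETE", "HEAD"}


def _validate_cors_rules_sketch(rules):
    if not 0 < len(rules) <= 100:
        raise ValueError("CORS configuration must have between 1 and 100 rules")
    for rule in rules:
        for method in rule["AllowedMethods"]:
            if method not in _CORS_METHODS_SKETCH:
                raise ValueError("Unsupported method is {}".format(method))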


@mock_s3
def test_boto3_get_bucket_cors():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    s3.create_bucket(Bucket=bucket_name)

    # Without CORS:
    with pytest.raises(ClientError) as err:
        s3.get_bucket_cors(Bucket=bucket_name)

    e = err.value
    e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
    e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")

    s3.put_bucket_cors(
        Bucket=bucket_name,
        CORSConfiguration={
            "CORSRules": [
                {
                    "AllowedOrigins": ["*"],
                    "AllowedMethods": ["GET", "POST"],
                    "AllowedHeaders": ["Authorization"],
                    "ExposeHeaders": ["x-amz-request-id"],
                    "MaxAgeSeconds": 123,
                },
                {
                    "AllowedOrigins": ["*"],
                    "AllowedMethods": ["PUT"],
                    "AllowedHeaders": ["Authorization"],
                    "ExposeHeaders": ["x-amz-request-id"],
                    "MaxAgeSeconds": 123,
                },
            ]
        },
    )

    resp = s3.get_bucket_cors(Bucket=bucket_name)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    len(resp["CORSRules"]).should.equal(2)


@mock_s3
def test_boto3_delete_bucket_cors():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_cors(
        Bucket=bucket_name,
        CORSConfiguration={
            "CORSRules": [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}]
        },
    )

    resp = s3.delete_bucket_cors(Bucket=bucket_name)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)

    # Verify deletion:
    with pytest.raises(ClientError) as err:
        s3.get_bucket_cors(Bucket=bucket_name)

    e = err.value
    e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
    e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")


@mock_s3
def test_put_bucket_notification():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="bucket")

    # With no configuration:
    result = s3.get_bucket_notification(Bucket="bucket")
    assert not result.get("TopicConfigurations")
    assert not result.get("QueueConfigurations")
    assert not result.get("LambdaFunctionConfigurations")

    # Place proper topic configuration:
    s3.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "TopicConfigurations": [
                {
                    "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic",
                    "Events": ["s3:ObjectCreated:*", "s3:ObjectRemoved:*"],
                },
                {
                    "TopicArn": "arn:aws:sns:us-east-1:012345678910:myothertopic",
                    "Events": ["s3:ObjectCreated:*"],
                    "Filter": {
                        "Key": {
                            "FilterRules": [
                                {"Name": "prefix", "Value": "images/"},
                                {"Name": "suffix", "Value": "png"},
                            ]
                        }
                    },
                },
            ]
        },
    )

    # Verify to completion:
    result = s3.get_bucket_notification_configuration(Bucket="bucket")
    assert len(result["TopicConfigurations"]) == 2
    assert not result.get("QueueConfigurations")
    assert not result.get("LambdaFunctionConfigurations")
    assert (
        result["TopicConfigurations"][0]["TopicArn"]
        == "arn:aws:sns:us-east-1:012345678910:mytopic"
    )
    assert (
        result["TopicConfigurations"][1]["TopicArn"]
        == "arn:aws:sns:us-east-1:012345678910:myothertopic"
    )
    assert len(result["TopicConfigurations"][0]["Events"]) == 2
    assert len(result["TopicConfigurations"][1]["Events"]) == 1
    assert result["TopicConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
    assert result["TopicConfigurations"][0]["Events"][1] == "s3:ObjectRemoved:*"
    assert result["TopicConfigurations"][1]["Events"][0] == "s3:ObjectCreated:*"
    assert result["TopicConfigurations"][0]["Id"]
    assert result["TopicConfigurations"][1]["Id"]
    assert not result["TopicConfigurations"][0].get("Filter")
    assert len(result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"]) == 2
    assert (
        result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Name"]
        == "prefix"
    )
    assert (
        result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Value"]
        == "images/"
    )
    assert (
        result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Name"]
        == "suffix"
    )
    assert (
        result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Value"]
        == "png"
    )

    # Place proper queue configuration:
    s3.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "QueueConfigurations": [
                {
                    "Id": "SomeID",
                    "QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue",
                    "Events": ["s3:ObjectCreated:*"],
                    "Filter": {
                        "Key": {"FilterRules": [{"Name": "prefix", "Value": "images/"}]}
                    },
                }
            ]
        },
    )
    result = s3.get_bucket_notification_configuration(Bucket="bucket")
    assert len(result["QueueConfigurations"]) == 1
    assert not result.get("TopicConfigurations")
    assert not result.get("LambdaFunctionConfigurations")
    assert result["QueueConfigurations"][0]["Id"] == "SomeID"
    assert (
        result["QueueConfigurations"][0]["QueueArn"]
        == "arn:aws:sqs:us-east-1:012345678910:myQueue"
    )
    assert result["QueueConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
    assert len(result["QueueConfigurations"][0]["Events"]) == 1
    assert len(result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1
    assert (
        result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"]
        == "prefix"
    )
    assert (
        result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"]
        == "images/"
    )

    # Place proper Lambda configuration:
    s3.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "LambdaFunctionConfigurations": [
                {
                    "LambdaFunctionArn": "arn:aws:lambda:us-east-1:012345678910:function:lambda",
                    "Events": ["s3:ObjectCreated:*"],
                    "Filter": {
                        "Key": {"FilterRules": [{"Name": "prefix", "Value": "images/"}]}
                    },
                }
            ]
        },
    )
    result = s3.get_bucket_notification_configuration(Bucket="bucket")
    assert len(result["LambdaFunctionConfigurations"]) == 1
    assert not result.get("TopicConfigurations")
    assert not result.get("QueueConfigurations")
    assert result["LambdaFunctionConfigurations"][0]["Id"]
    assert (
        result["LambdaFunctionConfigurations"][0]["LambdaFunctionArn"]
        == "arn:aws:lambda:us-east-1:012345678910:function:lambda"
    )
    assert (
        result["LambdaFunctionConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
    )
    assert len(result["LambdaFunctionConfigurations"][0]["Events"]) == 1
    assert (
        len(result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"])
        == 1
    )
    assert (
        result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0][
            "Name"
        ]
        == "prefix"
    )
    assert (
        result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0][
            "Value"
        ]
        == "images/"
    )

    # And with all 3 set:
    s3.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "TopicConfigurations": [
                {
                    "TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic",
                    "Events": ["s3:ObjectCreated:*", "s3:ObjectRemoved:*"],
                }
            ],
            "LambdaFunctionConfigurations": [
                {
                    "LambdaFunctionArn": "arn:aws:lambda:us-east-1:012345678910:function:lambda",
                    "Events": ["s3:ObjectCreated:*"],
                }
            ],
            "QueueConfigurations": [
                {
                    "QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue",
                    "Events": ["s3:ObjectCreated:*"],
                }
            ],
        },
    )
    result = s3.get_bucket_notification_configuration(Bucket="bucket")
    assert len(result["LambdaFunctionConfigurations"]) == 1
    assert len(result["TopicConfigurations"]) == 1
    assert len(result["QueueConfigurations"]) == 1

    # And clear it out:
    s3.put_bucket_notification_configuration(
        Bucket="bucket", NotificationConfiguration={}
    )
    result = s3.get_bucket_notification_configuration(Bucket="bucket")
    assert not result.get("TopicConfigurations")
    assert not result.get("QueueConfigurations")
    assert not result.get("LambdaFunctionConfigurations")
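

# Hedged sketch: the test above clears all notifications by putting an empty
# configuration; a reusable wrapper (name assumed) would simply bottle up
# that idiom.
def _clear_bucket_notifications_sketch(client, bucket_name):
    client.put_bucket_notification_configuration(
        Bucket=bucket_name, NotificationConfiguration={}
    )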


@mock_s3
def test_put_bucket_notification_errors():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="bucket")

    # With incorrect ARNs:
    for tech in ["Queue", "Topic", "LambdaFunction"]:
        with pytest.raises(ClientError) as err:
            s3.put_bucket_notification_configuration(
                Bucket="bucket",
                NotificationConfiguration={
                    "{}Configurations".format(tech): [
                        {
                            "{}Arn".format(
                                tech
                            ): "arn:aws:{}:us-east-1:012345678910:lksajdfkldskfj",
                            "Events": ["s3:ObjectCreated:*"],
                        }
                    ]
                },
            )

        assert err.value.response["Error"]["Code"] == "InvalidArgument"
        assert err.value.response["Error"]["Message"] == "The ARN is not well formed"

    # Region not the same as the bucket:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_notification_configuration(
            Bucket="bucket",
            NotificationConfiguration={
                "QueueConfigurations": [
                    {
                        "QueueArn": "arn:aws:sqs:us-west-2:012345678910:lksajdfkldskfj",
                        "Events": ["s3:ObjectCreated:*"],
                    }
                ]
            },
        )

    assert err.value.response["Error"]["Code"] == "InvalidArgument"
    assert (
        err.value.response["Error"]["Message"]
        == "The notification destination service region is not valid for the bucket location constraint"
    )

    # Invalid event name:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_notification_configuration(
            Bucket="bucket",
            NotificationConfiguration={
                "QueueConfigurations": [
                    {
                        "QueueArn": "arn:aws:sqs:us-east-1:012345678910:lksajdfkldskfj",
                        "Events": ["notarealeventname"],
                    }
                ]
            },
        )
    assert err.value.response["Error"]["Code"] == "InvalidArgument"
    assert (
        err.value.response["Error"]["Message"]
        == "The event is not supported for notifications"
    )
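

# Hedged sketch: the errors above hinge on the destination ARN's service and
# region fields (arn:partition:service:region:account:resource). A rough
# client-side precheck could split on ":" (helper name assumed):
def _notification_arn_ok_sketch(arn, expected_service, bucket_region):
    parts = arn.split(":")
    return (
        len(parts) >= 6
        and parts[2] == expected_service
        and parts[3] == bucket_region
    )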


@mock_s3
def test_boto3_put_bucket_logging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    log_bucket = "logbucket"
    wrong_region_bucket = "wrongregionlogbucket"
    s3.create_bucket(Bucket=bucket_name)
    s3.create_bucket(Bucket=log_bucket)  # Adding the ACL for log-delivery later...
    s3.create_bucket(
        Bucket=wrong_region_bucket,
        CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
    )

    # No logging config:
    result = s3.get_bucket_logging(Bucket=bucket_name)
    assert not result.get("LoggingEnabled")

    # A log-bucket that doesn't exist:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_logging(
            Bucket=bucket_name,
            BucketLoggingStatus={
                "LoggingEnabled": {"TargetBucket": "IAMNOTREAL", "TargetPrefix": ""}
            },
        )
    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"

    # A log-bucket that's missing the proper ACLs for LogDelivery:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_logging(
            Bucket=bucket_name,
            BucketLoggingStatus={
                "LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
            },
        )
    assert err.value.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
    assert "log-delivery" in err.value.response["Error"]["Message"]

    # Add the proper "log-delivery" ACL to the log buckets:
    bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
    for bucket in [log_bucket, wrong_region_bucket]:
        s3.put_bucket_acl(
            Bucket=bucket,
            AccessControlPolicy={
                "Grants": [
                    {
                        "Grantee": {
                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
                            "Type": "Group",
                        },
                        "Permission": "WRITE",
                    },
                    {
                        "Grantee": {
                            "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
                            "Type": "Group",
                        },
                        "Permission": "READ_ACP",
                    },
                    {
                        "Grantee": {"Type": "CanonicalUser", "ID": bucket_owner["ID"]},
                        "Permission": "FULL_CONTROL",
                    },
                ],
                "Owner": bucket_owner,
            },
        )

    # A log-bucket that's in the wrong region:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_logging(
            Bucket=bucket_name,
            BucketLoggingStatus={
                "LoggingEnabled": {
                    "TargetBucket": wrong_region_bucket,
                    "TargetPrefix": "",
                }
            },
        )
    assert err.value.response["Error"]["Code"] == "CrossLocationLoggingProhibitted"

    # Correct logging:
    s3.put_bucket_logging(
        Bucket=bucket_name,
        BucketLoggingStatus={
            "LoggingEnabled": {
                "TargetBucket": log_bucket,
                "TargetPrefix": "{}/".format(bucket_name),
            }
        },
    )
    result = s3.get_bucket_logging(Bucket=bucket_name)
    assert result["LoggingEnabled"]["TargetBucket"] == log_bucket
    assert result["LoggingEnabled"]["TargetPrefix"] == "{}/".format(bucket_name)
    assert not result["LoggingEnabled"].get("TargetGrants")

    # And disabling:
    s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
    assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")

    # And enabling with multiple target grants:
    s3.put_bucket_logging(
        Bucket=bucket_name,
        BucketLoggingStatus={
            "LoggingEnabled": {
                "TargetBucket": log_bucket,
                "TargetPrefix": "{}/".format(bucket_name),
                "TargetGrants": [
                    {
                        "Grantee": {
                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
                            "Type": "CanonicalUser",
                        },
                        "Permission": "READ",
                    },
                    {
                        "Grantee": {
                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
                            "Type": "CanonicalUser",
                        },
                        "Permission": "WRITE",
                    },
                ],
            }
        },
    )
    result = s3.get_bucket_logging(Bucket=bucket_name)
    assert len(result["LoggingEnabled"]["TargetGrants"]) == 2
    assert (
        result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"]
        == "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274"
    )

    # Test with just 1 grant:
    s3.put_bucket_logging(
        Bucket=bucket_name,
        BucketLoggingStatus={
            "LoggingEnabled": {
                "TargetBucket": log_bucket,
                "TargetPrefix": "{}/".format(bucket_name),
                "TargetGrants": [
                    {
                        "Grantee": {
                            "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
                            "Type": "CanonicalUser",
                        },
                        "Permission": "READ",
                    }
                ],
            }
        },
    )
    result = s3.get_bucket_logging(Bucket=bucket_name)
    assert len(result["LoggingEnabled"]["TargetGrants"]) == 1

    # With an invalid grant:
    with pytest.raises(ClientError) as err:
        s3.put_bucket_logging(
            Bucket=bucket_name,
            BucketLoggingStatus={
                "LoggingEnabled": {
                    "TargetBucket": log_bucket,
                    "TargetPrefix": "{}/".format(bucket_name),
                    "TargetGrants": [
                        {
                            "Grantee": {
                                "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
                                "Type": "CanonicalUser",
                            },
                            "Permission": "NOTAREALPERM",
                        }
                    ],
                }
            },
        )
    assert err.value.response["Error"]["Code"] == "MalformedXML"
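

# Hedged sketch: the ACL grants above are the prerequisite S3 checks before a
# bucket may receive access logs; a helper (name assumed) could bundle them.
def _grant_log_delivery_acl_sketch(client, bucket):
    owner = client.get_bucket_acl(Bucket=bucket)["Owner"]
    log_delivery = {
        "URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
        "Type": "Group",
    }
    client.put_bucket_acl(
        Bucket=bucket,
        AccessControlPolicy={
            "Grants": [
                {"Grantee": log_delivery, "Permission": "WRITE"},
                {"Grantee": log_delivery, "Permission": "READ_ACP"},
                {
                    "Grantee": {"Type": "CanonicalUser", "ID": owner["ID"]},
                    "Permission": "FULL_CONTROL",
                },
            ],
            "Owner": owner,
        },
    )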


@mock_s3
def test_boto3_put_object_tagging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)

    with pytest.raises(ClientError) as err:
        s3.put_object_tagging(
            Bucket=bucket_name,
            Key=key,
            Tagging={
                "TagSet": [
                    {"Key": "item1", "Value": "foo"},
                    {"Key": "item2", "Value": "bar"},
                ]
            },
        )

    e = err.value
    e.response["Error"].should.equal(
        {
            "Code": "NoSuchKey",
            "Message": "The specified key does not exist.",
            "Key": "key-with-tags",
            "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
        }
    )

    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    # Using system tags will fail:
    with pytest.raises(ClientError) as err:
        s3.put_object_tagging(
            Bucket=bucket_name,
            Key=key,
            Tagging={"TagSet": [{"Key": "aws:item1", "Value": "foo"}]},
        )
    e = err.value
    e.response["Error"]["Code"].should.equal("InvalidTag")

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={
            "TagSet": [
                {"Key": "item1", "Value": "foo"},
                {"Key": "item2", "Value": "bar"},
            ]
        },
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)


@mock_s3
def test_boto3_put_object_tagging_on_earliest_version():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)
    s3_resource = boto3.resource("s3")
    bucket_versioning = s3_resource.BucketVersioning(bucket_name)
    bucket_versioning.enable()
    bucket_versioning.status.should.equal("Enabled")

    with pytest.raises(ClientError) as err:
        s3.put_object_tagging(
            Bucket=bucket_name,
            Key=key,
            Tagging={
                "TagSet": [
                    {"Key": "item1", "Value": "foo"},
                    {"Key": "item2", "Value": "bar"},
                ]
            },
        )

    e = err.value
    e.response["Error"].should.equal(
        {
            "Code": "NoSuchKey",
            "Message": "The specified key does not exist.",
            "Key": "key-with-tags",
            "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
        }
    )

    s3.put_object(Bucket=bucket_name, Key=key, Body="test")
    s3.put_object(Bucket=bucket_name, Key=key, Body="test_updated")

    object_versions = list(s3_resource.Bucket(bucket_name).object_versions.all())
    first_object = object_versions[0]
    second_object = object_versions[1]

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={
            "TagSet": [
                {"Key": "item1", "Value": "foo"},
                {"Key": "item2", "Value": "bar"},
            ]
        },
        VersionId=first_object.id,
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    # Older version has tags while the most recent does not:
    resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
    resp["VersionId"].should.equal(first_object.id)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
    sorted_tagset.should.equal(
        [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
    )

    resp = s3.get_object_tagging(
        Bucket=bucket_name, Key=key, VersionId=second_object.id
    )
    resp["VersionId"].should.equal(second_object.id)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    resp["TagSet"].should.equal([])


@mock_s3
def test_boto3_put_object_tagging_on_both_version():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)
    s3_resource = boto3.resource("s3")
    bucket_versioning = s3_resource.BucketVersioning(bucket_name)
    bucket_versioning.enable()
    bucket_versioning.status.should.equal("Enabled")

    with pytest.raises(ClientError) as err:
        s3.put_object_tagging(
            Bucket=bucket_name,
            Key=key,
            Tagging={
                "TagSet": [
                    {"Key": "item1", "Value": "foo"},
                    {"Key": "item2", "Value": "bar"},
                ]
            },
        )

    e = err.value
    e.response["Error"].should.equal(
        {
            "Code": "NoSuchKey",
            "Message": "The specified key does not exist.",
            "Key": "key-with-tags",
            "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
        }
    )

    s3.put_object(Bucket=bucket_name, Key=key, Body="test")
    s3.put_object(Bucket=bucket_name, Key=key, Body="test_updated")

    object_versions = list(s3_resource.Bucket(bucket_name).object_versions.all())
    first_object = object_versions[0]
    second_object = object_versions[1]

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={
            "TagSet": [
                {"Key": "item1", "Value": "foo"},
                {"Key": "item2", "Value": "bar"},
            ]
        },
        VersionId=first_object.id,
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={
            "TagSet": [
                {"Key": "item1", "Value": "baz"},
                {"Key": "item2", "Value": "bin"},
            ]
        },
        VersionId=second_object.id,
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)

    resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
    sorted_tagset.should.equal(
        [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
    )

    resp = s3.get_object_tagging(
        Bucket=bucket_name, Key=key, VersionId=second_object.id
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
    sorted_tagset.should.equal(
        [{"Key": "item1", "Value": "baz"}, {"Key": "item2", "Value": "bin"}]
    )


@mock_s3
def test_boto3_put_object_tagging_with_single_tag():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={"TagSet": [{"Key": "item1", "Value": "foo"}]},
    )
    resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)


@mock_s3
def test_boto3_get_object_tagging():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-tags"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_object(Bucket=bucket_name, Key=key, Body="test")

    resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
    resp["TagSet"].should.have.length_of(0)

    resp = s3.put_object_tagging(
        Bucket=bucket_name,
        Key=key,
        Tagging={
            "TagSet": [
                {"Key": "item1", "Value": "foo"},
                {"Key": "item2", "Value": "bar"},
            ]
        },
    )
    resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
    resp["TagSet"].should.have.length_of(2)
    resp["TagSet"].should.contain({"Key": "item1", "Value": "foo"})
    resp["TagSet"].should.contain({"Key": "item2", "Value": "bar"})
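

# Hedged sketch: S3 does not guarantee TagSet ordering, which is why the
# versioned tagging tests above sort by "Key" before comparing. A tiny helper
# (name assumed) captures that normalisation.
def _sorted_tag_set_sketch(resp):
    return sorted(resp["TagSet"], key=lambda tag: tag["Key"])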


@mock_s3
def test_boto3_list_object_versions():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "000" + str(uuid4())
    key = "key-with-versions"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    items = (b"v1", b"v2")
    for body in items:
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    response = s3.list_object_versions(Bucket=bucket_name)
    # Two object versions should be returned
    len(response["Versions"]).should.equal(2)
    keys = set([item["Key"] for item in response["Versions"]])
    keys.should.equal({key})

    # The first item in the list should be the latest
    response["Versions"][0]["IsLatest"].should.equal(True)

    # Test latest object version is returned
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(items[-1])


@mock_s3
def test_boto3_list_object_versions_with_delimiter():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "000" + str(uuid4())
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    for key_index in list(range(1, 5)) + list(range(10, 14)):
        for version_index in range(1, 4):
            body = f"data-{version_index}".encode("UTF-8")
            s3.put_object(
                Bucket=bucket_name, Key=f"key{key_index}-with-data", Body=body
            )
            s3.put_object(
                Bucket=bucket_name, Key=f"key{key_index}-without-data", Body=b""
            )

    response = s3.list_object_versions(Bucket=bucket_name)
    # All object versions should be returned
    len(response["Versions"]).should.equal(
        48
    )  # 8 keys * 2 (one with, one without) * 3 versions per key

    # Use start of key as delimiter
    response = s3.list_object_versions(Bucket=bucket_name, Delimiter="key1")
    response.should.have.key("CommonPrefixes").equal([{"Prefix": "key1"}])
    response.should.have.key("Delimiter").equal("key1")
    # 3 keys that do not contain the phrase 'key1' (key2, key3, key4) * 2 * 3
    response.should.have.key("Versions").length_of(18)

    # Use in-between key as delimiter
    response = s3.list_object_versions(Bucket=bucket_name, Delimiter="-with-")
    response.should.have.key("CommonPrefixes").equal(
        [
            {"Prefix": "key1-with-"},
            {"Prefix": "key10-with-"},
            {"Prefix": "key11-with-"},
            {"Prefix": "key12-with-"},
            {"Prefix": "key13-with-"},
            {"Prefix": "key2-with-"},
            {"Prefix": "key3-with-"},
            {"Prefix": "key4-with-"},
        ]
    )
    response.should.have.key("Delimiter").equal("-with-")
    # key(1/10/11/12/13)-without, key(2/3/4)-without
    response.should.have.key("Versions").length_of(8 * 1 * 3)

    # Use in-between key as delimiter
    response = s3.list_object_versions(Bucket=bucket_name, Delimiter="1-with-")
    response.should.have.key("CommonPrefixes").equal(
        [{"Prefix": "key1-with-"}, {"Prefix": "key11-with-"}]
    )
    response.should.have.key("Delimiter").equal("1-with-")
    response.should.have.key("Versions").length_of(42)
    all_keys = set([v["Key"] for v in response["Versions"]])
    all_keys.should.contain("key1-without-data")
    all_keys.shouldnt.contain("key1-with-data")
    all_keys.should.contain("key4-with-data")
    all_keys.should.contain("key4-without-data")

    # Use in-between key as delimiter + prefix
    response = s3.list_object_versions(
        Bucket=bucket_name, Prefix="key1", Delimiter="with-"
    )
    response.should.have.key("CommonPrefixes").equal(
        [
            {"Prefix": "key1-with-"},
            {"Prefix": "key10-with-"},
            {"Prefix": "key11-with-"},
            {"Prefix": "key12-with-"},
            {"Prefix": "key13-with-"},
        ]
    )
    response.should.have.key("Delimiter").equal("with-")
    response.should.have.key("KeyMarker").equal("")
    response.shouldnt.have.key("NextKeyMarker")
    response.should.have.key("Versions").length_of(15)
    all_keys = set([v["Key"] for v in response["Versions"]])
    all_keys.should.equal(
        {
            "key1-without-data",
            "key10-without-data",
            "key11-without-data",
            "key12-without-data",
            "key13-without-data",
        }
    )

    # Start at KeyMarker, and filter using Prefix+Delimiter for all subsequent keys
    response = s3.list_object_versions(
        Bucket=bucket_name, Prefix="key1", Delimiter="with-", KeyMarker="key11"
    )
    response.should.have.key("CommonPrefixes").equal(
        [
            {"Prefix": "key11-with-"},
            {"Prefix": "key12-with-"},
            {"Prefix": "key13-with-"},
        ]
    )
    response.should.have.key("Delimiter").equal("with-")
    response.should.have.key("KeyMarker").equal("key11")
    response.shouldnt.have.key("NextKeyMarker")
    response.should.have.key("Versions").length_of(9)
    all_keys = set([v["Key"] for v in response["Versions"]])
    all_keys.should.equal(
        {"key11-without-data", "key12-without-data", "key13-without-data"}
    )
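

# Hedged sketch: the assertions above read a single response page; for larger
# buckets, boto3's standard paginator for list_object_versions applies. The
# helper name is assumed; the paginate kwargs mirror the calls above.
def _all_version_keys_sketch(client, bucket_name, **kwargs):
    paginator = client.get_paginator("list_object_versions")
    keys = set()
    for page in paginator.paginate(Bucket=bucket_name, **kwargs):
        keys.update(v["Key"] for v in page.get("Versions", []))
    return keys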


@mock_s3
def test_boto3_list_object_versions_with_versioning_disabled():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-versions"
    s3.create_bucket(Bucket=bucket_name)
    items = (b"v1", b"v2")
    for body in items:
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    response = s3.list_object_versions(Bucket=bucket_name)
    # One object version should be returned
    len(response["Versions"]).should.equal(1)
    response["Versions"][0]["Key"].should.equal(key)

    # The version id should be the string "null"
    response["Versions"][0]["VersionId"].should.equal("null")

    # Test latest object version is returned
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(items[-1])


@mock_s3
def test_boto3_list_object_versions_with_versioning_enabled_late():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-versions"
    s3.create_bucket(Bucket=bucket_name)
    items = (b"v1", b"v2")
    s3.put_object(Bucket=bucket_name, Key=key, Body=b"v1")
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    s3.put_object(Bucket=bucket_name, Key=key, Body=b"v2")
    response = s3.list_object_versions(Bucket=bucket_name)

    # Two object versions should be returned
    len(response["Versions"]).should.equal(2)
    keys = set([item["Key"] for item in response["Versions"]])
    keys.should.equal({key})

    # There should still be a null version id.
    versionsId = set([item["VersionId"] for item in response["Versions"]])
    versionsId.should.contain("null")

    # Test latest object version is returned
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(items[-1])


@mock_s3
def test_boto3_bad_prefix_list_object_versions():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-versions"
    bad_prefix = "key-that-does-not-exist"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    items = (b"v1", b"v2")
    for body in items:
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    response = s3.list_object_versions(Bucket=bucket_name, Prefix=bad_prefix)
    response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
    response.should_not.contain("Versions")
    response.should_not.contain("DeleteMarkers")


@mock_s3
def test_boto3_delete_markers():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-versions-and-unicode-ó"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    items = (b"v1", b"v2")
    for body in items:
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    s3.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})

    with pytest.raises(ClientError) as e:
        s3.get_object(Bucket=bucket_name, Key=key)
    e.value.response["Error"]["Code"].should.equal("NoSuchKey")

    response = s3.list_object_versions(Bucket=bucket_name)
    response["Versions"].should.have.length_of(2)
    response["DeleteMarkers"].should.have.length_of(1)

    s3.delete_object(
        Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][0]["VersionId"]
    )
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(items[-1])

    response = s3.list_object_versions(Bucket=bucket_name)
    response["Versions"].should.have.length_of(2)

    # We've asserted there are only 2 records, so one is newest and one is oldest
    latest = list(filter(lambda item: item["IsLatest"], response["Versions"]))[0]
    oldest = list(filter(lambda item: not item["IsLatest"], response["Versions"]))[0]

    # Double check ordering of version ID's
    latest["VersionId"].should_not.equal(oldest["VersionId"])

    # Double check the name is still unicode
    latest["Key"].should.equal("key-with-versions-and-unicode-ó")
    oldest["Key"].should.equal("key-with-versions-and-unicode-ó")


@mock_s3
def test_boto3_multiple_delete_markers():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    key = "key-with-versions-and-unicode-ó"
    s3.create_bucket(Bucket=bucket_name)
    s3.put_bucket_versioning(
        Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
    )
    items = (b"v1", b"v2")
    for body in items:
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    # Delete the object twice to add multiple delete markers
    s3.delete_object(Bucket=bucket_name, Key=key)
    s3.delete_object(Bucket=bucket_name, Key=key)

    response = s3.list_object_versions(Bucket=bucket_name)
    response["DeleteMarkers"].should.have.length_of(2)

    with pytest.raises(ClientError) as e:
        s3.get_object(Bucket=bucket_name, Key=key)
    e.value.response["Error"]["Code"].should.equal("NoSuchKey")

    # Remove both delete markers to restore the object
    s3.delete_object(
        Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][0]["VersionId"]
    )
    s3.delete_object(
        Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][1]["VersionId"]
    )
    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(items[-1])

    response = s3.list_object_versions(Bucket=bucket_name)
    response["Versions"].should.have.length_of(2)

    # We've asserted there are only 2 records, so one is newest and one is oldest
    latest = list(filter(lambda item: item["IsLatest"], response["Versions"]))[0]
    oldest = list(filter(lambda item: not item["IsLatest"], response["Versions"]))[0]

    # Double check ordering of version ID's
    latest["VersionId"].should_not.equal(oldest["VersionId"])

    # Double check the name is still unicode
    latest["Key"].should.equal("key-with-versions-and-unicode-ó")
    oldest["Key"].should.equal("key-with-versions-and-unicode-ó")
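

# Hedged sketch: the restore steps above generalise to deleting every delete
# marker recorded for a key (helper name assumed; single-page listing for
# brevity).
def _remove_delete_markers_sketch(client, bucket_name, key):
    response = client.list_object_versions(Bucket=bucket_name, Prefix=key)
    for marker in response.get("DeleteMarkers", []):
        if marker["Key"] == key:
            client.delete_object(
                Bucket=bucket_name, Key=key, VersionId=marker["VersionId"]
            )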


@mock_s3
def test_get_stream_gzipped():
    payload = b"this is some stuff here"

    s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3_client.create_bucket(Bucket="moto-tests")
    buffer_ = BytesIO()
    with GzipFile(fileobj=buffer_, mode="w") as f:
        f.write(payload)
    payload_gz = buffer_.getvalue()

    s3_client.put_object(
        Bucket="moto-tests", Key="keyname", Body=payload_gz, ContentEncoding="gzip"
    )

    obj = s3_client.get_object(Bucket="moto-tests", Key="keyname")
    res = zlib.decompress(obj["Body"].read(), 16 + zlib.MAX_WBITS)
    assert res == payload
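

# Hedged sketch: zlib.decompress(data, 16 + zlib.MAX_WBITS), as used above,
# selects the gzip container format; GzipFile over BytesIO is an equivalent
# stdlib spelling that avoids the magic wbits constant (helper name assumed).
def _gunzip_bytes_sketch(data):
    with GzipFile(fileobj=BytesIO(data), mode="r") as f:
        return f.read()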


TEST_XML = """\
<?xml version="1.0" encoding="UTF-8"?>
<ns0:WebsiteConfiguration xmlns:ns0="http://s3.amazonaws.com/doc/2006-03-01/">
    <ns0:IndexDocument>
        <ns0:Suffix>index.html</ns0:Suffix>
    </ns0:IndexDocument>
    <ns0:RoutingRules>
        <ns0:RoutingRule>
            <ns0:Condition>
                <ns0:KeyPrefixEquals>test/testing</ns0:KeyPrefixEquals>
            </ns0:Condition>
            <ns0:Redirect>
                <ns0:ReplaceKeyWith>test.txt</ns0:ReplaceKeyWith>
            </ns0:Redirect>
        </ns0:RoutingRule>
    </ns0:RoutingRules>
</ns0:WebsiteConfiguration>
"""


@mock_s3
def test_boto3_bucket_name_too_long():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    with pytest.raises(ClientError) as exc:
        s3.create_bucket(Bucket="x" * 64)
    exc.value.response["Error"]["Code"].should.equal("InvalidBucketName")


@mock_s3
def test_boto3_bucket_name_too_short():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    with pytest.raises(ClientError) as exc:
        s3.create_bucket(Bucket="x" * 2)
    exc.value.response["Error"]["Code"].should.equal("InvalidBucketName")


@mock_s3
def test_accelerated_none_when_unspecified():
    bucket_name = "some_bucket"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)
    resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
    resp.shouldnt.have.key("Status")


@mock_s3
def test_can_enable_bucket_acceleration():
    bucket_name = "some_bucket"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)
    resp = s3.put_bucket_accelerate_configuration(
        Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
    )
    resp.keys().should.have.length_of(
        1
    )  # Response contains nothing (only HTTP headers)
    resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
    resp.should.have.key("Status")
    resp["Status"].should.equal("Enabled")


@mock_s3
def test_can_suspend_bucket_acceleration():
    bucket_name = "some_bucket"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)
    resp = s3.put_bucket_accelerate_configuration(
        Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
    )
    resp = s3.put_bucket_accelerate_configuration(
        Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"}
    )
    resp.keys().should.have.length_of(
        1
    )  # Response contains nothing (only HTTP headers)
    resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
    resp.should.have.key("Status")
    resp["Status"].should.equal("Suspended")


@mock_s3
def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
    bucket_name = "some_bucket"
    s3 = boto3.client("s3")
    s3.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": "us-west-1"},
    )
    resp = s3.put_bucket_accelerate_configuration(
        Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"}
    )
    resp.keys().should.have.length_of(
        1
    )  # Response contains nothing (only HTTP headers)
    resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
    resp.shouldnt.have.key("Status")


@mock_s3
def test_accelerate_configuration_status_validation():
    bucket_name = "some_bucket"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)
    with pytest.raises(ClientError) as exc:
        s3.put_bucket_accelerate_configuration(
            Bucket=bucket_name, AccelerateConfiguration={"Status": "bad_status"}
        )
    exc.value.response["Error"]["Code"].should.equal("MalformedXML")


@mock_s3
def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
    bucket_name = "some.bucket.with.dots"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)
    with pytest.raises(ClientError) as exc:
        s3.put_bucket_accelerate_configuration(
            Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
        )
    exc.value.response["Error"]["Code"].should.equal("InvalidRequest")
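

# Hedged sketch: as the test above shows, Transfer Acceleration rejects
# bucket names containing dots, so a cheap precheck is possible before making
# the call (helper name assumed).
def _bucket_name_supports_acceleration_sketch(bucket_name):
    return "." not in bucket_name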


def store_and_read_back_a_key(key):
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    body = b"Some body"

    s3.create_bucket(Bucket=bucket_name)
    s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    response = s3.get_object(Bucket=bucket_name, Key=key)
    response["Body"].read().should.equal(body)


@mock_s3
def test_paths_with_leading_slashes_work():
    store_and_read_back_a_key("/a-key")
2019-09-24 00:16:20 +00:00
2019-08-28 14:17:45 +00:00
@mock_s3
def test_root_dir_with_empty_name_works():
    if settings.TEST_SERVER_MODE:
        raise SkipTest("Does not work in server mode due to error in Werkzeug")
    store_and_read_back_a_key("/")


@pytest.mark.parametrize("bucket_name", ["mybucket", "my.bucket"])
@mock_s3
def test_leading_slashes_not_removed(bucket_name):
    """Make sure that leading slashes are not removed internally."""
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket_name)

    uploaded_key = "/key"
    invalid_key_1 = "key"
    invalid_key_2 = "//key"

    s3.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body")

    with pytest.raises(ClientError) as e:
        s3.get_object(Bucket=bucket_name, Key=invalid_key_1)
    e.value.response["Error"]["Code"].should.equal("NoSuchKey")

    with pytest.raises(ClientError) as e:
        s3.get_object(Bucket=bucket_name, Key=invalid_key_2)
    e.value.response["Error"]["Code"].should.equal("NoSuchKey")


@pytest.mark.parametrize(
    "key", ["foo/bar/baz", "foo", "foo/run_dt%3D2019-01-01%252012%253A30%253A00"]
)
@mock_s3
def test_delete_objects_with_url_encoded_key(key):
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "mybucket"
    body = b"Some body"

    s3.create_bucket(Bucket=bucket_name)

    def put_object():
        s3.put_object(Bucket=bucket_name, Key=key, Body=body)

    def assert_deleted():
        with pytest.raises(ClientError) as e:
            s3.get_object(Bucket=bucket_name, Key=key)
        e.value.response["Error"]["Code"].should.equal("NoSuchKey")

    put_object()
    s3.delete_object(Bucket=bucket_name, Key=key)
    assert_deleted()

    put_object()
    s3.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})
    assert_deleted()


@mock_s3
def test_delete_objects_unknown_key():
    bucket_name = "test-moto-issue-1581"
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket=bucket_name)
    client.put_object(Bucket=bucket_name, Key="file1", Body="body")
    s = client.delete_objects(
        Bucket=bucket_name, Delete={"Objects": [{"Key": "file1"}, {"Key": "file2"}]}
    )
    s["Deleted"].should.have.length_of(2)
    s["Deleted"].should.contain({"Key": "file1"})
    s["Deleted"].should.contain({"Key": "file2"})
    client.delete_bucket(Bucket=bucket_name)


@mock_s3
@mock_config
def test_public_access_block():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="mybucket")

    # Try to get the public access block (should not exist by default)
    with pytest.raises(ClientError) as ce:
        client.get_public_access_block(Bucket="mybucket")

    assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
    assert (
        ce.value.response["Error"]["Message"]
        == "The public access block configuration was not found"
    )
    assert ce.value.response["ResponseMetadata"]["HTTPStatusCode"] == 404

    # Put a public block in place:
    test_map = {
        "BlockPublicAcls": False,
        "IgnorePublicAcls": False,
        "BlockPublicPolicy": False,
        "RestrictPublicBuckets": False,
    }

    for field in test_map.keys():
        # Toggle:
        test_map[field] = True
        client.put_public_access_block(
            Bucket="mybucket", PublicAccessBlockConfiguration=test_map
        )

        # Test:
        assert (
            test_map
            == client.get_public_access_block(Bucket="mybucket")[
                "PublicAccessBlockConfiguration"
            ]
        )

    # Assume missing values are default False:
    client.put_public_access_block(
        Bucket="mybucket", PublicAccessBlockConfiguration={"BlockPublicAcls": True}
    )
    assert client.get_public_access_block(Bucket="mybucket")[
        "PublicAccessBlockConfiguration"
    ] == {
        "BlockPublicAcls": True,
        "IgnorePublicAcls": False,
        "BlockPublicPolicy": False,
        "RestrictPublicBuckets": False,
    }

    # Test with a blank PublicAccessBlockConfiguration:
    with pytest.raises(ClientError) as ce:
        client.put_public_access_block(
            Bucket="mybucket", PublicAccessBlockConfiguration={}
        )

    assert ce.value.response["Error"]["Code"] == "InvalidRequest"
    assert (
        ce.value.response["Error"]["Message"]
        == "Must specify at least one configuration."
    )
    assert ce.value.response["ResponseMetadata"]["HTTPStatusCode"] == 400

    # Test that things work with AWS Config:
    config_client = boto3.client("config", region_name=DEFAULT_REGION_NAME)
    result = config_client.get_resource_config_history(
        resourceType="AWS::S3::Bucket", resourceId="mybucket"
    )
    pub_block_config = json.loads(
        result["configurationItems"][0]["supplementaryConfiguration"][
            "PublicAccessBlockConfiguration"
        ]
    )
    assert pub_block_config == {
        "blockPublicAcls": True,
        "ignorePublicAcls": False,
        "blockPublicPolicy": False,
        "restrictPublicBuckets": False,
    }

    # Delete:
    client.delete_public_access_block(Bucket="mybucket")
    with pytest.raises(ClientError) as ce:
        client.get_public_access_block(Bucket="mybucket")
    assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"


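# A hedged helper sketch (not in the suite): read a bucket's public access
# block, treating a missing configuration as the all-False defaults that the
# assertions above rely on. The helper name is illustrative.
def _effective_public_access_block(client, bucket):
    try:
        return client.get_public_access_block(Bucket=bucket)[
            "PublicAccessBlockConfiguration"
        ]
    except ClientError as err:
        # This is the error code asserted in test_public_access_block above:
        if err.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration":
            return {
                "BlockPublicAcls": False,
                "IgnorePublicAcls": False,
                "BlockPublicPolicy": False,
                "RestrictPublicBuckets": False,
            }
        raise

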
@mock_s3
def test_s3_public_access_block_to_config_dict():
    from moto.s3.config import s3_config_query

    # With 1 bucket in us-west-2:
    s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")

    public_access_block = {
        "BlockPublicAcls": "True",
        "IgnorePublicAcls": "False",
        "BlockPublicPolicy": "True",
        "RestrictPublicBuckets": "False",
    }

    # Add a public access block:
    s3_config_query.backends["global"].put_bucket_public_access_block(
        "bucket1", public_access_block
    )

    result = (
        s3_config_query.backends["global"]
        .buckets["bucket1"]
        .public_access_block.to_config_dict()
    )

    convert_bool = lambda x: x == "True"
    for key, value in public_access_block.items():
        assert result[
            "{lowercase}{rest}".format(lowercase=key[0].lower(), rest=key[1:])
        ] == convert_bool(value)

    # Verify that this resides in the full bucket's to_config_dict:
    full_result = s3_config_query.backends["global"].buckets["bucket1"].to_config_dict()
    assert (
        json.loads(
            full_result["supplementaryConfiguration"]["PublicAccessBlockConfiguration"]
        )
        == result
    )


@mock_s3
def test_list_config_discovered_resources():
    from moto.s3.config import s3_config_query

    # Without any buckets:
    assert s3_config_query.list_config_service_resources(
        "global", "global", None, None, 100, None
    ) == ([], None)

    # With 10 buckets in us-west-2:
    for x in range(0, 10):
        s3_config_query.backends["global"].create_bucket(
            "bucket{}".format(x), "us-west-2"
        )

    # With 2 buckets in eu-west-1:
    for x in range(10, 12):
        s3_config_query.backends["global"].create_bucket(
            "eu-bucket{}".format(x), "eu-west-1"
        )

    result, next_token = s3_config_query.list_config_service_resources(
        None, None, 100, None
    )
    assert not next_token
    assert len(result) == 12
    for x in range(0, 10):
        assert result[x] == {
            "type": "AWS::S3::Bucket",
            "id": "bucket{}".format(x),
            "name": "bucket{}".format(x),
            "region": "us-west-2",
        }
    for x in range(10, 12):
        assert result[x] == {
            "type": "AWS::S3::Bucket",
            "id": "eu-bucket{}".format(x),
            "name": "eu-bucket{}".format(x),
            "region": "eu-west-1",
        }

    # With a name:
    result, next_token = s3_config_query.list_config_service_resources(
        None, "bucket0", 100, None
    )
    assert len(result) == 1 and result[0]["name"] == "bucket0" and not next_token

    # With a region:
    result, next_token = s3_config_query.list_config_service_resources(
        None, None, 100, None, resource_region="eu-west-1"
    )
    assert len(result) == 2 and not next_token and result[1]["name"] == "eu-bucket11"

    # With resource ids:
    result, next_token = s3_config_query.list_config_service_resources(
        ["bucket0", "bucket1"], None, 100, None
    )
    assert (
        len(result) == 2
        and result[0]["name"] == "bucket0"
        and result[1]["name"] == "bucket1"
        and not next_token
    )

    # With duplicated resource ids:
    result, next_token = s3_config_query.list_config_service_resources(
        ["bucket0", "bucket0"], None, 100, None
    )
    assert len(result) == 1 and result[0]["name"] == "bucket0" and not next_token

    # Pagination:
    result, next_token = s3_config_query.list_config_service_resources(
        None, None, 1, None
    )
    assert (
        len(result) == 1 and result[0]["name"] == "bucket0" and next_token == "bucket1"
    )

    # Last Page:
    result, next_token = s3_config_query.list_config_service_resources(
        None, None, 1, "eu-bucket11", resource_region="eu-west-1"
    )
    assert len(result) == 1 and result[0]["name"] == "eu-bucket11" and not next_token

    # With a list of buckets:
    result, next_token = s3_config_query.list_config_service_resources(
        ["bucket0", "bucket1"], None, 1, None
    )
    assert (
        len(result) == 1 and result[0]["name"] == "bucket0" and next_token == "bucket1"
    )

    # With an invalid page:
    with pytest.raises(InvalidNextTokenException) as inte:
        s3_config_query.list_config_service_resources(None, None, 1, "notabucket")

    assert "The nextToken provided is invalid" in inte.value.message


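# A hedged sketch (not in the suite) of draining every page from the config
# query, using the (resource_ids, resource_name, limit, next_token) positional
# signature exercised above; the helper name and page size are illustrative.
def _drain_config_pages(query, page_size=5):
    names, token = [], None
    while True:
        page, token = query.list_config_service_resources(
            None, None, page_size, token
        )
        names.extend(item["name"] for item in page)
        # A falsy token marks the last page, as the "Last Page" case asserts:
        if not token:
            return names

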
@mock_s3
def test_s3_lifecycle_config_dict():
    from moto.s3.config import s3_config_query

    # With 1 bucket in us-west-2:
    s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")

    # And a lifecycle policy
    lifecycle = [
        {
            "ID": "rule1",
            "Status": "Enabled",
            "Filter": {"Prefix": ""},
            "Expiration": {"Days": 1},
        },
        {
            "ID": "rule2",
            "Status": "Enabled",
            "Filter": {
                "And": {
                    "Prefix": "some/path",
                    "Tag": [{"Key": "TheKey", "Value": "TheValue"}],
                }
            },
            "Expiration": {"Days": 1},
        },
        {"ID": "rule3", "Status": "Enabled", "Filter": {}, "Expiration": {"Days": 1}},
        {
            "ID": "rule4",
            "Status": "Enabled",
            "Filter": {"Prefix": ""},
            "AbortIncompleteMultipartUpload": {"DaysAfterInitiation": 1},
        },
    ]
    s3_config_query.backends["global"].put_bucket_lifecycle("bucket1", lifecycle)

    # Get the rules for this:
    lifecycles = [
        rule.to_config_dict()
        for rule in s3_config_query.backends["global"].buckets["bucket1"].rules
    ]

    # Verify the first:
    assert lifecycles[0] == {
        "id": "rule1",
        "prefix": None,
        "status": "Enabled",
        "expirationInDays": 1,
        "expiredObjectDeleteMarker": None,
        "noncurrentVersionExpirationInDays": -1,
        "expirationDate": None,
        "transitions": None,
        "noncurrentVersionTransitions": None,
        "abortIncompleteMultipartUpload": None,
        "filter": {"predicate": {"type": "LifecyclePrefixPredicate", "prefix": ""}},
    }

    # Verify the second:
    assert lifecycles[1] == {
        "id": "rule2",
        "prefix": None,
        "status": "Enabled",
        "expirationInDays": 1,
        "expiredObjectDeleteMarker": None,
        "noncurrentVersionExpirationInDays": -1,
        "expirationDate": None,
        "transitions": None,
        "noncurrentVersionTransitions": None,
        "abortIncompleteMultipartUpload": None,
        "filter": {
            "predicate": {
                "type": "LifecycleAndOperator",
                "operands": [
                    {"type": "LifecyclePrefixPredicate", "prefix": "some/path"},
                    {
                        "type": "LifecycleTagPredicate",
                        "tag": {"key": "TheKey", "value": "TheValue"},
                    },
                ],
            }
        },
    }

    # And the third:
    assert lifecycles[2] == {
        "id": "rule3",
        "prefix": None,
        "status": "Enabled",
        "expirationInDays": 1,
        "expiredObjectDeleteMarker": None,
        "noncurrentVersionExpirationInDays": -1,
        "expirationDate": None,
        "transitions": None,
        "noncurrentVersionTransitions": None,
        "abortIncompleteMultipartUpload": None,
        "filter": {"predicate": None},
    }

    # And the last:
    assert lifecycles[3] == {
        "id": "rule4",
        "prefix": None,
        "status": "Enabled",
        "expirationInDays": None,
        "expiredObjectDeleteMarker": None,
        "noncurrentVersionExpirationInDays": -1,
        "expirationDate": None,
        "transitions": None,
        "noncurrentVersionTransitions": None,
        "abortIncompleteMultipartUpload": {"daysAfterInitiation": 1},
        "filter": {"predicate": {"type": "LifecyclePrefixPredicate", "prefix": ""}},
    }


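# A hedged client-level sketch (not in the suite): one equivalent rule applied
# through boto3's public API rather than the backend call above. Assumption:
# the client validates parameters more strictly than the backend (for example
# it expects "Tags" rather than "Tag" inside an "And" filter), so the
# backend-shaped dicts are not reusable as-is.
@mock_s3
def _put_lifecycle_via_client_sketch():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket="bucket1")
    s3.put_bucket_lifecycle_configuration(
        Bucket="bucket1",
        LifecycleConfiguration={
            "Rules": [
                {
                    "ID": "rule1",
                    "Status": "Enabled",
                    "Filter": {"Prefix": ""},
                    "Expiration": {"Days": 1},
                }
            ]
        },
    )

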
@mock_s3
def test_s3_notification_config_dict():
    from moto.s3.config import s3_config_query

    # With 1 bucket in us-west-2:
    s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")

    # And some notifications:
    notifications = {
        "TopicConfiguration": [
            {
                "Id": "Topic",
                "Topic": "arn:aws:sns:us-west-2:012345678910:mytopic",
                "Event": [
                    "s3:ReducedRedundancyLostObject",
                    "s3:ObjectRestore:Completed",
                ],
            }
        ],
        "QueueConfiguration": [
            {
                "Id": "Queue",
                "Queue": "arn:aws:sqs:us-west-2:012345678910:myqueue",
                "Event": ["s3:ObjectRemoved:Delete"],
                "Filter": {
                    "S3Key": {
                        "FilterRule": [{"Name": "prefix", "Value": "stuff/here/"}]
                    }
                },
            }
        ],
        "CloudFunctionConfiguration": [
            {
                "Id": "Lambda",
                "CloudFunction": "arn:aws:lambda:us-west-2:012345678910:function:mylambda",
                "Event": [
                    "s3:ObjectCreated:Post",
                    "s3:ObjectCreated:Copy",
                    "s3:ObjectCreated:Put",
                ],
                "Filter": {
                    "S3Key": {"FilterRule": [{"Name": "suffix", "Value": ".png"}]}
                },
            }
        ],
    }
    s3_config_query.backends["global"].put_bucket_notification_configuration(
        "bucket1", notifications
    )

    # Get the notifications for this:
    notifications = (
        s3_config_query.backends["global"]
        .buckets["bucket1"]
        .notification_configuration.to_config_dict()
    )

    # Verify it all:
    assert notifications == {
        "configurations": {
            "Topic": {
                "events": [
                    "s3:ReducedRedundancyLostObject",
                    "s3:ObjectRestore:Completed",
                ],
                "filter": None,
                "objectPrefixes": [],
                "topicARN": "arn:aws:sns:us-west-2:012345678910:mytopic",
                "type": "TopicConfiguration",
            },
            "Queue": {
                "events": ["s3:ObjectRemoved:Delete"],
                "filter": {
                    "s3KeyFilter": {
                        "filterRules": [{"name": "prefix", "value": "stuff/here/"}]
                    }
                },
                "objectPrefixes": [],
                "queueARN": "arn:aws:sqs:us-west-2:012345678910:myqueue",
                "type": "QueueConfiguration",
            },
            "Lambda": {
                "events": [
                    "s3:ObjectCreated:Post",
                    "s3:ObjectCreated:Copy",
                    "s3:ObjectCreated:Put",
                ],
                "filter": {
                    "s3KeyFilter": {
                        "filterRules": [{"name": "suffix", "value": ".png"}]
                    }
                },
                "objectPrefixes": [],
                "queueARN": "arn:aws:lambda:us-west-2:012345678910:function:mylambda",
                "type": "LambdaConfiguration",
            },
        }
    }


@mock_s3
def test_s3_acl_to_config_dict():
    from moto.s3.config import s3_config_query
    from moto.s3.models import FakeAcl, FakeGrant, FakeGrantee, OWNER

    # With 1 bucket in us-west-2:
    s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")

    # Get the config dict with nothing other than the owner details:
    acls = s3_config_query.backends["global"].buckets["logbucket"].acl.to_config_dict()
    owner_acl = {
        "grantee": {"id": OWNER, "displayName": None},
        "permission": "FullControl",
    }
    assert acls == {
        "grantSet": None,
        "owner": {"displayName": None, "id": OWNER},
        "grantList": [owner_acl],
    }

    # Add some Log Bucket ACLs:
    log_acls = FakeAcl(
        [
            FakeGrant(
                [FakeGrantee(uri="http://acs.amazonaws.com/groups/s3/LogDelivery")],
                "WRITE",
            ),
            FakeGrant(
                [FakeGrantee(uri="http://acs.amazonaws.com/groups/s3/LogDelivery")],
                "READ_ACP",
            ),
            FakeGrant([FakeGrantee(grantee_id=OWNER)], "FULL_CONTROL"),
        ]
    )
    s3_config_query.backends["global"].put_bucket_acl("logbucket", log_acls)

    acls = s3_config_query.backends["global"].buckets["logbucket"].acl.to_config_dict()
    assert acls == {
        "grantSet": None,
        "grantList": [
            {"grantee": "LogDelivery", "permission": "Write"},
            {"grantee": "LogDelivery", "permission": "ReadAcp"},
            {
                "grantee": {
                    "displayName": None,
                    "id": "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a",
                },
                "permission": "FullControl",
            },
        ],
        "owner": {"displayName": None, "id": OWNER},
    }

    # Give the owner less than full_control permissions:
    log_acls = FakeAcl(
        [
            FakeGrant([FakeGrantee(grantee_id=OWNER)], "READ_ACP"),
            FakeGrant([FakeGrantee(grantee_id=OWNER)], "WRITE_ACP"),
        ]
    )
    s3_config_query.backends["global"].put_bucket_acl("logbucket", log_acls)

    acls = s3_config_query.backends["global"].buckets["logbucket"].acl.to_config_dict()
    assert acls == {
        "grantSet": None,
        "grantList": [
            {"grantee": {"id": OWNER, "displayName": None}, "permission": "ReadAcp"},
            {"grantee": {"id": OWNER, "displayName": None}, "permission": "WriteAcp"},
        ],
        "owner": {"displayName": None, "id": OWNER},
    }


@mock_s3
def test_s3_config_dict():
    from moto.s3.config import s3_config_query
    from moto.s3.models import (
        FakeAcl,
        FakeGrant,
        FakeGrantee,
        OWNER,
    )

    # Without any buckets:
    assert not s3_config_query.get_config_resource("some_bucket")

    tags = {"someTag": "someValue", "someOtherTag": "someOtherValue"}

    # With 1 bucket in us-west-2:
    s3_config_query.backends["global"].create_bucket("bucket1", "us-west-2")
    s3_config_query.backends["global"].put_bucket_tagging("bucket1", tags)

    # With a log bucket:
    s3_config_query.backends["global"].create_bucket("logbucket", "us-west-2")
    log_acls = FakeAcl(
        [
            FakeGrant(
                [FakeGrantee(uri="http://acs.amazonaws.com/groups/s3/LogDelivery")],
                "WRITE",
            ),
            FakeGrant(
                [FakeGrantee(uri="http://acs.amazonaws.com/groups/s3/LogDelivery")],
                "READ_ACP",
            ),
            FakeGrant([FakeGrantee(grantee_id=OWNER)], "FULL_CONTROL"),
        ]
    )

    s3_config_query.backends["global"].put_bucket_acl("logbucket", log_acls)
    s3_config_query.backends["global"].put_bucket_logging(
        "bucket1", {"TargetBucket": "logbucket", "TargetPrefix": ""}
    )

    policy = json.dumps(
        {
            "Statement": [
                {
                    "Effect": "Deny",
                    "Action": "s3:DeleteObject",
                    "Principal": "*",
                    "Resource": "arn:aws:s3:::bucket1/*",
                }
            ]
        }
    )

    # The policy is a byte array -- need to encode in Python 3
    pass_policy = bytes(policy, "utf-8")
    s3_config_query.backends["global"].put_bucket_policy("bucket1", pass_policy)

    # Get the us-west-2 bucket and verify that it works properly:
    bucket1_result = s3_config_query.get_config_resource("bucket1")

    # Just verify a few things:
    assert bucket1_result["arn"] == "arn:aws:s3:::bucket1"
    assert bucket1_result["awsRegion"] == "us-west-2"
    assert bucket1_result["resourceName"] == bucket1_result["resourceId"] == "bucket1"
    assert bucket1_result["tags"] == {
        "someTag": "someValue",
        "someOtherTag": "someOtherValue",
    }
    assert json.loads(
        bucket1_result["supplementaryConfiguration"]["BucketTaggingConfiguration"]
    ) == {"tagSets": [{"tags": bucket1_result["tags"]}]}
    assert isinstance(bucket1_result["configuration"], str)

    exist_list = [
        "AccessControlList",
        "BucketAccelerateConfiguration",
        "BucketLoggingConfiguration",
        "BucketPolicy",
        "IsRequesterPaysEnabled",
        "BucketNotificationConfiguration",
    ]
    for exist in exist_list:
        assert isinstance(bucket1_result["supplementaryConfiguration"][exist], str)

    # Verify the logging config:
    assert json.loads(
        bucket1_result["supplementaryConfiguration"]["BucketLoggingConfiguration"]
    ) == {"destinationBucketName": "logbucket", "logFilePrefix": ""}

    # Verify that the AccessControlList is a double-wrapped JSON string:
    assert json.loads(
        json.loads(bucket1_result["supplementaryConfiguration"]["AccessControlList"])
    ) == {
        "grantSet": None,
        "grantList": [
            {
                "grantee": {
                    "displayName": None,
                    "id": "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a",
                },
                "permission": "FullControl",
            },
        ],
        "owner": {
            "displayName": None,
            "id": "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a",
        },
    }

    # Verify the policy:
    assert json.loads(bucket1_result["supplementaryConfiguration"]["BucketPolicy"]) == {
        "policyText": policy
    }

    # Filter by correct region:
    assert bucket1_result == s3_config_query.get_config_resource(
        "bucket1", resource_region="us-west-2"
    )

    # By incorrect region:
    assert not s3_config_query.get_config_resource(
        "bucket1", resource_region="eu-west-1"
    )

    # With correct resource ID and name:
    assert bucket1_result == s3_config_query.get_config_resource(
        "bucket1", resource_name="bucket1"
    )

    # With an incorrect resource name:
    assert not s3_config_query.get_config_resource(
        "bucket1", resource_name="eu-bucket-1"
    )

    # Verify that no bucket policy returns the proper value:
    logging_bucket = s3_config_query.get_config_resource("logbucket")
    assert json.loads(logging_bucket["supplementaryConfiguration"]["BucketPolicy"]) == {
        "policyText": None
    }
    assert not logging_bucket["tags"]
    assert not logging_bucket["supplementaryConfiguration"].get(
        "BucketTaggingConfiguration"
    )


@mock_s3
def test_creating_presigned_post():
    bucket = "presigned-test"
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket=bucket)
    success_url = "http://localhost/completed"
    fdata = b"test data\n"
    file_uid = uuid.uuid4()
    conditions = [
        {"Content-Type": "text/plain"},
        {"x-amz-server-side-encryption": "AES256"},
        {"success_action_redirect": success_url},
    ]
    conditions.append(["content-length-range", 1, 30])

    real_key = "{file_uid}.txt".format(file_uid=file_uid)
    data = s3.generate_presigned_post(
        Bucket=bucket,
        Key=real_key,
        Fields={
            "content-type": "text/plain",
            "success_action_redirect": success_url,
            "x-amz-server-side-encryption": "AES256",
        },
        Conditions=conditions,
        ExpiresIn=1000,
    )
    resp = requests.post(
        data["url"], data=data["fields"], files={"file": fdata}, allow_redirects=False
    )
    assert resp.status_code == 303

    redirect = resp.headers["Location"]
    assert redirect.startswith(success_url)
    parts = urlparse(redirect)
    args = parse_qs(parts.query)
    assert args["key"][0] == real_key
    assert args["bucket"][0] == bucket

    assert s3.get_object(Bucket=bucket, Key=real_key)["Body"].read() == fdata


@mock_s3
def test_presigned_put_url_with_approved_headers():
    bucket = str(uuid.uuid4())
    key = "file.txt"
    content = b"filecontent"
    expected_contenttype = "app/sth"
    conn = boto3.resource("s3", region_name="us-east-1")
    conn.create_bucket(Bucket=bucket)
    s3 = boto3.client("s3", region_name="us-east-1")

    # Create a pre-signed url with some metadata.
    url = s3.generate_presigned_url(
        ClientMethod="put_object",
        Params={"Bucket": bucket, "Key": key, "ContentType": expected_contenttype},
    )

    # Verify S3 throws an error when the header is not provided
    response = requests.put(url, data=content)
    response.status_code.should.equal(403)
    str(response.content).should.contain("<Code>SignatureDoesNotMatch</Code>")
    str(response.content).should.contain(
        "<Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message>"
    )

    # Verify S3 throws an error when the header has the wrong value
    response = requests.put(
        url, data=content, headers={"Content-Type": "application/unknown"}
    )
    response.status_code.should.equal(403)
    str(response.content).should.contain("<Code>SignatureDoesNotMatch</Code>")
    str(response.content).should.contain(
        "<Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message>"
    )

    # Verify S3 uploads correctly when providing the meta data
    response = requests.put(
        url, data=content, headers={"Content-Type": expected_contenttype}
    )
    response.status_code.should.equal(200)

    # Assert the object exists
    obj = s3.get_object(Bucket=bucket, Key=key)
    obj["ContentType"].should.equal(expected_contenttype)
    obj["ContentLength"].should.equal(11)
    obj["Body"].read().should.equal(content)
    obj["Metadata"].should.equal({})

    s3.delete_object(Bucket=bucket, Key=key)
    s3.delete_bucket(Bucket=bucket)


@mock_s3
def test_presigned_put_url_with_custom_headers():
    bucket = str(uuid.uuid4())
    key = "file.txt"
    content = b"filecontent"
    conn = boto3.resource("s3", region_name="us-east-1")
    conn.create_bucket(Bucket=bucket)
    s3 = boto3.client("s3", region_name="us-east-1")

    # Create a pre-signed url with some metadata.
    url = s3.generate_presigned_url(
        ClientMethod="put_object",
        Params={"Bucket": bucket, "Key": key, "Metadata": {"venue": "123"}},
    )

    # Verify S3 uploads correctly when providing the meta data
    response = requests.put(url, data=content)
    response.status_code.should.equal(200)

    # Assert the object exists
    obj = s3.get_object(Bucket=bucket, Key=key)
    obj["ContentLength"].should.equal(11)
    obj["Body"].read().should.equal(content)
    obj["Metadata"].should.equal({"venue": "123"})

    s3.delete_object(Bucket=bucket, Key=key)
    s3.delete_bucket(Bucket=bucket)


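# A hedged counterpart sketch (not in the suite): a presigned *download* URL,
# fetched with plain requests just as the presigned PUTs above are. Assumes
# moto intercepts the GET the same way it intercepts the PUTs; bucket and key
# names are illustrative.
@mock_s3
def _presigned_get_url_sketch():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="presigned-get-demo")
    s3.put_object(Bucket="presigned-get-demo", Key="file.txt", Body=b"filecontent")
    url = s3.generate_presigned_url(
        ClientMethod="get_object",
        Params={"Bucket": "presigned-get-demo", "Key": "file.txt"},
    )
    response = requests.get(url)
    assert response.status_code == 200
    assert response.content == b"filecontent"

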
@mock_s3
def test_request_partial_content_should_contain_content_length():
    bucket = "bucket"
    object_key = "key"
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket)
    s3.Object(bucket, object_key).put(Body="some text")

    file = s3.Object(bucket, object_key)
    response = file.get(Range="bytes=0-1024")
    response["ContentLength"].should.equal(9)


@mock_s3
def test_request_partial_content_should_contain_actual_content_length():
    bucket = "bucket"
    object_key = "key"
    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket)
    s3.Object(bucket, object_key).put(Body="some text")

    file = s3.Object(bucket, object_key)
    requested_range = "bytes=1024-"
    # Use pytest.raises so the test fails if no error is raised at all;
    # a bare try/except would silently pass when the call succeeds.
    with pytest.raises(botocore.client.ClientError) as ex:
        file.get(Range=requested_range)
    e = ex.value
    e.response["Error"]["Code"].should.equal("InvalidRange")
    e.response["Error"]["Message"].should.equal(
        "The requested range is not satisfiable"
    )
    e.response["Error"]["ActualObjectSize"].should.equal("9")
    e.response["Error"]["RangeRequested"].should.equal(requested_range)


@mock_s3
def test_get_unknown_version_should_throw_specific_error():
    bucket_name = "my_bucket"
    object_key = "hello.txt"
    s3 = boto3.resource("s3", region_name="us-east-1")
    client = boto3.client("s3", region_name="us-east-1")
    bucket = s3.create_bucket(Bucket=bucket_name)
    bucket.Versioning().enable()
    content = "some text"
    s3.Object(bucket_name, object_key).put(Body=content)

    with pytest.raises(ClientError) as e:
        client.get_object(Bucket=bucket_name, Key=object_key, VersionId="unknown")
    e.value.response["Error"]["Code"].should.equal("NoSuchVersion")
    e.value.response["Error"]["Message"].should.equal(
        "The specified version does not exist."
    )


@mock_s3
def test_request_partial_content_without_specifying_range_should_return_full_object():
    bucket = "bucket"
    object_key = "key"
    s3 = boto3.resource("s3", region_name="us-east-1")
    s3.create_bucket(Bucket=bucket)
    s3.Object(bucket, object_key).put(Body="some text that goes a long way")

    file = s3.Object(bucket, object_key)
    response = file.get(Range="")
    response["ContentLength"].should.equal(30)


@mock_s3
def test_object_headers():
    bucket = "my-bucket"
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket)

    res = s3.put_object(
        Bucket=bucket,
        Body=b"test",
        Key="file.txt",
        ServerSideEncryption="aws:kms",
        SSEKMSKeyId="test",
        BucketKeyEnabled=True,
    )
    res.should.have.key("ETag")
    res.should.have.key("ServerSideEncryption")
    res.should.have.key("SSEKMSKeyId")
    res.should.have.key("BucketKeyEnabled")

    res = s3.get_object(Bucket=bucket, Key="file.txt")
    res.should.have.key("ETag")
    res.should.have.key("ServerSideEncryption")
    res.should.have.key("SSEKMSKeyId")
    res.should.have.key("BucketKeyEnabled")


if settings.TEST_SERVER_MODE:

    @mock_s3
    def test_upload_data_without_content_type():
        bucket = "mybucket"
        s3 = boto3.client("s3")
        s3.create_bucket(Bucket=bucket)
        data_input = b"some data 123 321"
        req = requests.put("http://localhost:5000/mybucket/test.txt", data=data_input)
        req.status_code.should.equal(200)

        res = s3.get_object(Bucket=bucket, Key="test.txt")
        data = res["Body"].read()
        assert data == data_input


@mock_s3
def test_get_object_versions_with_prefix():
    bucket_name = "testbucket-3113"
    s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    s3_client.create_bucket(Bucket=bucket_name)
    bucket_versioning = s3_resource.BucketVersioning(bucket_name)
    bucket_versioning.enable()

    s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
    s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
    s3_client.put_object(Bucket=bucket_name, Body=b"alttest", Key="altfile.txt")
    s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")

    versions = s3_client.list_object_versions(Bucket=bucket_name, Prefix="file")
    versions["Versions"].should.have.length_of(3)
    versions["Prefix"].should.equal("file")


@mock_s3
def test_create_bucket_duplicate():
    bucket_name = "same-bucket-test-1371"
    alternate_region = "eu-north-1"

    # Create it in the default region
    default_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    default_client.create_bucket(Bucket=bucket_name)

    # Create it again in the same region - should just return that same bucket
    default_client.create_bucket(Bucket=bucket_name)

    # Create the bucket in a different region - should return an error
    diff_client = boto3.client("s3", region_name=alternate_region)
    with pytest.raises(ClientError) as ex:
        diff_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={"LocationConstraint": alternate_region},
        )
    err = ex.value.response["Error"]
    err["Code"].should.equal("BucketAlreadyOwnedByYou")
    err["Message"].should.equal(
        "Your previous request to create the named bucket succeeded and you already own it."
    )
    err["BucketName"].should.equal(bucket_name)

    # Try this again - but creating the bucket in a non-default region in the first place
    bucket_name = "same-bucket-nondefault-region-test-1371"
    diff_client.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={"LocationConstraint": alternate_region},
    )

    # Recreating the bucket in the same non-default region should fail
    with pytest.raises(ClientError) as ex:
        diff_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={"LocationConstraint": alternate_region},
        )
    err = ex.value.response["Error"]
    err["Code"].should.equal("BucketAlreadyOwnedByYou")
    err["Message"].should.equal(
        "Your previous request to create the named bucket succeeded and you already own it."
    )
    err["BucketName"].should.equal(bucket_name)

    # Recreating the bucket in the default region should fail
    diff_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    with pytest.raises(ClientError) as ex:
        diff_client.create_bucket(Bucket=bucket_name)
    err = ex.value.response["Error"]
    err["Code"].should.equal("BucketAlreadyOwnedByYou")
    err["Message"].should.equal(
        "Your previous request to create the named bucket succeeded and you already own it."
    )
    err["BucketName"].should.equal(bucket_name)

    # Recreating the bucket in a third region should fail
    diff_client = boto3.client("s3", region_name="ap-northeast-1")
    with pytest.raises(ClientError) as ex:
        diff_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={"LocationConstraint": "ap-northeast-1"},
        )
    err = ex.value.response["Error"]
    err["Code"].should.equal("BucketAlreadyOwnedByYou")
    err["Message"].should.equal(
        "Your previous request to create the named bucket succeeded and you already own it."
    )
    err["BucketName"].should.equal(bucket_name)


@mock_s3
def test_delete_objects_with_empty_keyname():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    bucket_name = "testbucket-4077"
    bucket = resource.create_bucket(Bucket=bucket_name)
    key_name = " "
    bucket.put_object(Key=key_name, Body=b"")
    client.list_objects(Bucket=bucket_name).should.have.key("Contents").length_of(1)

    bucket.delete_objects(Delete={"Objects": [{"Key": key_name}]})
    client.list_objects(Bucket=bucket_name).shouldnt.have.key("Contents")

    bucket.put_object(Key=key_name, Body=b"")
    client.delete_object(Bucket=bucket_name, Key=key_name)
    client.list_objects(Bucket=bucket_name).shouldnt.have.key("Contents")


@mock_s3
def test_head_object_should_return_default_content_type():
    s3 = boto3.resource("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="testbucket")
    s3.Bucket("testbucket").upload_fileobj(BytesIO(b"foobar"), Key="testobject")

    s3_client = boto3.client("s3", region_name="us-east-1")
    resp = s3_client.head_object(Bucket="testbucket", Key="testobject")

    resp["ContentType"].should.equal("binary/octet-stream")
    resp["ResponseMetadata"]["HTTPHeaders"]["content-type"].should.equal(
        "binary/octet-stream"
    )

    s3.Object("testbucket", "testobject").content_type.should.equal(
        "binary/octet-stream"
    )


@mock_s3
def test_request_partial_content_should_contain_all_metadata():
    # github.com/spulec/moto/issues/4203
    bucket = "bucket"
    object_key = "key"
    body = "some text"
    query_range = "0-3"

    s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
    s3.create_bucket(Bucket=bucket)
    obj = boto3.resource("s3").Object(bucket, object_key)
    obj.put(Body=body)

    response = obj.get(Range="bytes={}".format(query_range))

    assert response["ETag"] == obj.e_tag
    assert response["LastModified"] == obj.last_modified
    assert response["ContentLength"] == 4
    assert response["ContentRange"] == "bytes {}/{}".format(query_range, len(body))


@mock_s3
@mock_kms
def test_boto3_copy_object_with_kms_encryption():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    kms_client = boto3.client("kms", region_name=DEFAULT_REGION_NAME)
    kms_key = kms_client.create_key()["KeyMetadata"]["KeyId"]

    client.create_bucket(
        Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
    )
    client.put_object(Bucket="blah", Key="test1", Body=b"test1")

    client.copy_object(
        CopySource={"Bucket": "blah", "Key": "test1"},
        Bucket="blah",
        Key="test2",
        SSEKMSKeyId=kms_key,
        ServerSideEncryption="aws:kms",
    )

    result = client.head_object(Bucket="blah", Key="test2")
    assert result["SSEKMSKeyId"] == kms_key
    assert result["ServerSideEncryption"] == "aws:kms"


@mock_s3
def test_head_versioned_key_in_not_versioned_bucket():
    client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    client.create_bucket(Bucket="simple-bucked")

    with pytest.raises(ClientError) as ex:
        client.head_object(
            Bucket="simple-bucked", Key="file.txt", VersionId="noVersion"
        )

    response = ex.value.response
    assert response["Error"]["Code"] == "400"


@mock_s3
def test_objects_tagging_with_same_key_name():
    s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
    key_name = "file.txt"

    bucket1 = "bucket-1"
    s3.create_bucket(Bucket=bucket1)
    tagging = "variable=one"
    s3.put_object(Bucket=bucket1, Body=b"test", Key=key_name, Tagging=tagging)

    bucket2 = "bucket-2"
    s3.create_bucket(Bucket=bucket2)
    tagging2 = "variable=two"
    s3.put_object(Bucket=bucket2, Body=b"test", Key=key_name, Tagging=tagging2)

    variable1 = s3.get_object_tagging(Bucket=bucket1, Key=key_name)["TagSet"][0][
        "Value"
    ]
    variable2 = s3.get_object_tagging(Bucket=bucket2, Key=key_name)["TagSet"][0][
        "Value"
    ]
    assert variable1 == "one"
    assert variable2 == "two"