diff --git a/tests/test_s3/test_multiple_accounts_server.py b/tests/test_s3/test_multiple_accounts_server.py
index 7a41b6a9a..06795111b 100644
--- a/tests/test_s3/test_multiple_accounts_server.py
+++ b/tests/test_s3/test_multiple_accounts_server.py
@@ -1,8 +1,9 @@
+from unittest import SkipTest
+
import requests
from moto import settings
from moto.server import ThreadedMotoServer
-from unittest import SkipTest
SERVER_PORT = 5001
@@ -27,8 +28,8 @@ class TestAccountIdResolution:
requests.put(f"http://{name}.localhost:{SERVER_PORT}/")
res = requests.get(BASE_URL)
- res.content.should.contain(b"foo")
- res.content.should.contain(b"bar")
+ assert b"foo" in res.content
+ assert b"bar" in res.content
# Create two more buckets in another account
headers = {"x-moto-account-id": "333344445555"}
@@ -38,12 +39,12 @@ class TestAccountIdResolution:
# Verify only these buckets exist in this account
res = requests.get(BASE_URL, headers=headers)
- res.content.should.contain(b"baz")
- res.content.should.contain(b"bla")
- res.content.shouldnt.contain(b"foo")
- res.content.shouldnt.contain(b"bar")
+ assert b"baz" in res.content
+ assert b"bla" in res.content
+ assert b"foo" not in res.content
+ assert b"bar" not in res.content
# Verify these buckets do not exist in the original account
res = requests.get(BASE_URL)
- res.content.shouldnt.contain(b"baz")
- res.content.shouldnt.contain(b"bla")
+ assert b"baz" not in res.content
+ assert b"bla" not in res.content
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 73fd49e24..33cc75797 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -1,41 +1,38 @@
import datetime
-import os
-from urllib.parse import urlparse, parse_qs
from gzip import GzipFile
from io import BytesIO
-import zlib
-import pickle
-import uuid
-
import json
+import os
+import pickle
+from unittest import SkipTest, mock
+from urllib.parse import urlparse, parse_qs
+import uuid
+from uuid import uuid4
+import zlib
+
import boto3
from botocore.client import ClientError
import botocore.exceptions
from botocore.handlers import disable_signing
from freezegun import freeze_time
+import pytest
import requests
+from moto import settings, mock_s3, mock_config
from moto.moto_api import state_manager
from moto.s3.responses import DEFAULT_REGION_NAME
-from unittest import SkipTest, mock
-import pytest
-
-import sure # noqa # pylint: disable=unused-import
-
-from moto import settings, mock_s3, mock_config
import moto.s3.models as s3model
-from uuid import uuid4
-class MyModel(object):
+class MyModel:
def __init__(self, name, value, metadata=None):
self.name = name
self.value = value
self.metadata = metadata or {}
def save(self):
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.put_object(
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.put_object(
Bucket="mybucket", Key=self.name, Body=self.value, Metadata=self.metadata
)
@@ -92,20 +89,20 @@ def test_resource_get_object_returns_etag():
model_instance = MyModel("steve", "is awesome")
model_instance.save()
- conn.Bucket("mybucket").Object("steve").e_tag.should.equal(
+ assert conn.Bucket("mybucket").Object("steve").e_tag == (
'"d32bda93738f7e03adb22e66c90fbc04"'
)
@mock_s3
def test_key_save_to_missing_bucket():
- s3 = boto3.resource("s3")
+ s3_resource = boto3.resource("s3")
- key = s3.Object("mybucket", "the-key")
+ key = s3_resource.Object("mybucket", "the-key")
with pytest.raises(ClientError) as ex:
key.put(Body=b"foobar")
- ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "NoSuchBucket"
+ assert ex.value.response["Error"]["Message"] == (
"The specified bucket does not exist"
)
@@ -114,43 +111,43 @@ def test_key_save_to_missing_bucket():
def test_missing_key_request():
if settings.TEST_SERVER_MODE:
raise SkipTest("Only test status code in non-ServerMode")
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="foobar")
response = requests.get("http://foobar.s3.amazonaws.com/the-key")
- response.status_code.should.equal(404)
+ assert response.status_code == 404
@mock_s3
def test_empty_key():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"")
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp.should.have.key("ContentLength").equal(0)
- resp["Body"].read().should.equal(b"")
+ assert resp["ContentLength"] == 0
+ assert resp["Body"].read() == b""
@mock_s3
def test_key_name_encoding_in_listing():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
name = "6T7\x159\x12\r\x08.txt"
- key = s3.Object("foobar", name)
+ key = s3_resource.Object("foobar", name)
key.put(Body=b"")
key_received = client.list_objects(Bucket="foobar")["Contents"][0]["Key"]
- key_received.should.equal(name)
+ assert key_received == name
key_received = client.list_objects_v2(Bucket="foobar")["Contents"][0]["Key"]
- key_received.should.equal(name)
+ assert key_received == name
name = "example/file.text"
client.put_object(Bucket="foobar", Key=name, Body=b"")
@@ -158,75 +155,75 @@ def test_key_name_encoding_in_listing():
key_received = client.list_objects(
Bucket="foobar", Prefix="example/", Delimiter="/", MaxKeys=1, EncodingType="url"
)["Contents"][0]["Key"]
- key_received.should.equal(name)
+ assert key_received == name
@mock_s3
def test_empty_key_set_on_existing_key():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"some content")
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp.should.have.key("ContentLength").equal(12)
- resp["Body"].read().should.equal(b"some content")
+ assert resp["ContentLength"] == 12
+ assert resp["Body"].read() == b"some content"
key.put(Body=b"")
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp.should.have.key("ContentLength").equal(0)
- resp["Body"].read().should.equal(b"")
+ assert resp["ContentLength"] == 0
+ assert resp["Body"].read() == b""
@mock_s3
def test_large_key_save():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"foobar" * 100000)
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Body"].read().should.equal(b"foobar" * 100000)
+ assert resp["Body"].read() == b"foobar" * 100000
@mock_s3
def test_set_metadata():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"some value", Metadata={"md": "Metadatastring"})
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Metadata"].should.equal({"md": "Metadatastring"})
+ assert resp["Metadata"] == {"md": "Metadatastring"}
@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_last_modified():
# See https://github.com/boto/boto/issues/466
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"some value", Metadata={"md": "Metadatastring"})
- rs = client.list_objects_v2(Bucket="foobar")["Contents"]
- rs[0]["LastModified"].should.be.a(datetime.datetime)
+ resp = client.list_objects_v2(Bucket="foobar")["Contents"]
+ assert isinstance(resp[0]["LastModified"], datetime.datetime)
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["LastModified"].should.be.a(datetime.datetime)
+ assert isinstance(resp["LastModified"], datetime.datetime)
as_header = resp["ResponseMetadata"]["HTTPHeaders"]["last-modified"]
- as_header.should.be.a(str)
+ assert isinstance(as_header, str)
if not settings.TEST_SERVER_MODE:
- as_header.should.equal("Sun, 01 Jan 2012 12:00:00 GMT")
+ assert as_header == "Sun, 01 Jan 2012 12:00:00 GMT"
@mock_s3
@@ -234,18 +231,18 @@ def test_missing_bucket():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as ex:
client.head_bucket(Bucket="mybucket")
- ex.value.response["Error"]["Code"].should.equal("404")
- ex.value.response["Error"]["Message"].should.equal("Not Found")
+ assert ex.value.response["Error"]["Code"] == "404"
+ assert ex.value.response["Error"]["Message"] == "Not Found"
with pytest.raises(ClientError) as ex:
client.head_bucket(Bucket="dash-in-name")
- ex.value.response["Error"]["Code"].should.equal("404")
- ex.value.response["Error"]["Message"].should.equal("Not Found")
+ assert ex.value.response["Error"]["Code"] == "404"
+ assert ex.value.response["Error"]["Message"] == "Not Found"
@mock_s3
def test_create_existing_bucket():
- "Trying to create a bucket that already exists should raise an Error"
+ """Creating a bucket that already exists should raise an Error."""
client = boto3.client("s3", region_name="us-west-2")
kwargs = {
"Bucket": "foobar",
@@ -254,17 +251,16 @@ def test_create_existing_bucket():
client.create_bucket(**kwargs)
with pytest.raises(ClientError) as ex:
client.create_bucket(**kwargs)
- ex.value.response["Error"]["Code"].should.equal("BucketAlreadyOwnedByYou")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "BucketAlreadyOwnedByYou"
+ assert ex.value.response["Error"]["Message"] == (
"Your previous request to create the named bucket succeeded and you already own it."
)
@mock_s3
def test_create_existing_bucket_in_us_east_1():
- "Trying to create a bucket that already exists in us-east-1 returns the bucket"
+ """Creating a bucket that already exists in us-east-1 returns the bucket.
- """"
http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
Your previous request to create the named bucket succeeded and you already
own it. You get this error in all AWS regions except US Standard,
@@ -278,18 +274,18 @@ def test_create_existing_bucket_in_us_east_1():
@mock_s3
def test_bucket_deletion():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"some value")
# Try to delete a bucket that still has keys
with pytest.raises(ClientError) as ex:
client.delete_bucket(Bucket="foobar")
- ex.value.response["Error"]["Code"].should.equal("BucketNotEmpty")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "BucketNotEmpty"
+ assert ex.value.response["Error"]["Message"] == (
"The bucket you tried to delete is not empty"
)
@@ -299,8 +295,8 @@ def test_bucket_deletion():
# Delete non-existent bucket
with pytest.raises(ClientError) as ex:
client.delete_bucket(Bucket="foobar")
- ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "NoSuchBucket"
+ assert ex.value.response["Error"]["Message"] == (
"The specified bucket does not exist"
)
@@ -311,7 +307,7 @@ def test_get_all_buckets():
client.create_bucket(Bucket="foobar")
client.create_bucket(Bucket="foobar2")
- client.list_buckets()["Buckets"].should.have.length_of(2)
+ assert len(client.list_buckets()["Buckets"]) == 2
@mock_s3
@@ -328,7 +324,7 @@ def test_post_to_bucket():
)
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Body"].read().should.equal(b"nothing")
+ assert resp["Body"].read() == b"nothing"
@mock_s3
@@ -345,74 +341,76 @@ def test_post_with_metadata_to_bucket():
)
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Metadata"].should.equal({"test": "metadata"})
+ assert resp["Metadata"] == {"test": "metadata"}
@mock_s3
def test_delete_versioned_objects():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket = "test"
key = "test"
- s3.create_bucket(Bucket=bucket)
+ s3_client.create_bucket(Bucket=bucket)
- s3.put_object(Bucket=bucket, Key=key, Body=b"")
+ s3_client.put_object(Bucket=bucket, Key=key, Body=b"")
- s3.put_bucket_versioning(
+ s3_client.put_bucket_versioning(
Bucket=bucket, VersioningConfiguration={"Status": "Enabled"}
)
- objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
- versions = s3.list_object_versions(Bucket=bucket).get("Versions")
- delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")
+ objects = s3_client.list_objects_v2(Bucket=bucket).get("Contents")
+ versions = s3_client.list_object_versions(Bucket=bucket).get("Versions")
+ delete_markers = s3_client.list_object_versions(Bucket=bucket).get("DeleteMarkers")
- objects.should.have.length_of(1)
- versions.should.have.length_of(1)
- delete_markers.should.equal(None)
+ assert len(objects) == 1
+ assert len(versions) == 1
+ assert delete_markers is None
- s3.delete_object(Bucket=bucket, Key=key)
+ s3_client.delete_object(Bucket=bucket, Key=key)
- objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
- versions = s3.list_object_versions(Bucket=bucket).get("Versions")
- delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")
+ objects = s3_client.list_objects_v2(Bucket=bucket).get("Contents")
+ versions = s3_client.list_object_versions(Bucket=bucket).get("Versions")
+ delete_markers = s3_client.list_object_versions(Bucket=bucket).get("DeleteMarkers")
- objects.should.equal(None)
- versions.should.have.length_of(1)
- delete_markers.should.have.length_of(1)
+ assert objects is None
+ assert len(versions) == 1
+ assert len(delete_markers) == 1
- s3.delete_object(Bucket=bucket, Key=key, VersionId=versions[0].get("VersionId"))
+ s3_client.delete_object(
+ Bucket=bucket, Key=key, VersionId=versions[0].get("VersionId")
+ )
- objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
- versions = s3.list_object_versions(Bucket=bucket).get("Versions")
- delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")
+ objects = s3_client.list_objects_v2(Bucket=bucket).get("Contents")
+ versions = s3_client.list_object_versions(Bucket=bucket).get("Versions")
+ delete_markers = s3_client.list_object_versions(Bucket=bucket).get("DeleteMarkers")
- objects.should.equal(None)
- versions.should.equal(None)
- delete_markers.should.have.length_of(1)
+ assert objects is None
+ assert versions is None
+ assert len(delete_markers) == 1
- s3.delete_object(
+ s3_client.delete_object(
Bucket=bucket, Key=key, VersionId=delete_markers[0].get("VersionId")
)
- objects = s3.list_objects_v2(Bucket=bucket).get("Contents")
- versions = s3.list_object_versions(Bucket=bucket).get("Versions")
- delete_markers = s3.list_object_versions(Bucket=bucket).get("DeleteMarkers")
+ objects = s3_client.list_objects_v2(Bucket=bucket).get("Contents")
+ versions = s3_client.list_object_versions(Bucket=bucket).get("Versions")
+ delete_markers = s3_client.list_object_versions(Bucket=bucket).get("DeleteMarkers")
- objects.should.equal(None)
- versions.should.equal(None)
- delete_markers.should.equal(None)
+ assert objects is None
+ assert versions is None
+ assert delete_markers is None
@mock_s3
def test_delete_missing_key():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
- s3.Object("foobar", "key1").put(Body=b"some value")
- s3.Object("foobar", "key2").put(Body=b"some value")
- s3.Object("foobar", "key3").put(Body=b"some value")
- s3.Object("foobar", "key4").put(Body=b"some value")
+ s3_resource.Object("foobar", "key1").put(Body=b"some value")
+ s3_resource.Object("foobar", "key2").put(Body=b"some value")
+ s3_resource.Object("foobar", "key3").put(Body=b"some value")
+ s3_resource.Object("foobar", "key4").put(Body=b"some value")
result = bucket.delete_objects(
Delete={
@@ -424,13 +422,13 @@ def test_delete_missing_key():
]
}
)
- result.should.have.key("Deleted").equal(
+ assert result["Deleted"] == (
[{"Key": "unknown"}, {"Key": "key1"}, {"Key": "key3"}, {"Key": "typo"}]
)
- result.shouldnt.have.key("Errors")
+ assert "Errors" not in result
objects = list(bucket.objects.all())
- set([o.key for o in objects]).should.equal(set(["key2", "key4"]))
+    assert {o.key for o in objects} == {"key2", "key4"}
@mock_s3
@@ -443,15 +441,15 @@ def test_delete_empty_keys_list():
@pytest.mark.parametrize("name", ["firstname.lastname", "with-dash"])
@mock_s3
def test_bucket_name_with_special_chars(name):
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket(name)
+ bucket = s3_resource.Bucket(name)
bucket.create()
- s3.Object(name, "the-key").put(Body=b"some value")
+ s3_resource.Object(name, "the-key").put(Body=b"some value")
resp = client.get_object(Bucket=name, Key="the-key")
- resp["Body"].read().should.equal(b"some value")
+ assert resp["Body"].read() == b"some value"
@pytest.mark.parametrize(
@@ -459,67 +457,65 @@ def test_bucket_name_with_special_chars(name):
)
@mock_s3
def test_key_with_special_characters(key):
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("testname")
+ bucket = s3_resource.Bucket("testname")
bucket.create()
- s3.Object("testname", key).put(Body=b"value")
+ s3_resource.Object("testname", key).put(Body=b"value")
objects = list(bucket.objects.all())
- [o.key for o in objects].should.equal([key])
+ assert [o.key for o in objects] == [key]
resp = client.get_object(Bucket="testname", Key=key)
- resp["Body"].read().should.equal(b"value")
+ assert resp["Body"].read() == b"value"
@mock_s3
def test_bucket_key_listing_order():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "test_bucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
prefix = "toplevel/"
names = ["x/key", "y.key1", "y.key2", "y.key3", "x/y/key", "x/y/z/key"]
for name in names:
- s3.Object(bucket_name, prefix + name).put(Body=b"somedata")
+ s3_resource.Object(bucket_name, prefix + name).put(Body=b"somedata")
delimiter = ""
keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
- keys.should.equal(
- [
- "toplevel/x/key",
- "toplevel/x/y/key",
- "toplevel/x/y/z/key",
- "toplevel/y.key1",
- "toplevel/y.key2",
- "toplevel/y.key3",
- ]
- )
+ assert keys == [
+ "toplevel/x/key",
+ "toplevel/x/y/key",
+ "toplevel/x/y/z/key",
+ "toplevel/y.key1",
+ "toplevel/y.key2",
+ "toplevel/y.key3",
+ ]
delimiter = "/"
keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
- keys.should.equal(["toplevel/y.key1", "toplevel/y.key2", "toplevel/y.key3"])
+ assert keys == ["toplevel/y.key1", "toplevel/y.key2", "toplevel/y.key3"]
# Test delimiter with no prefix
keys = [x.key for x in bucket.objects.filter(Delimiter=delimiter)]
- keys.should.equal([])
+ assert keys == []
prefix = "toplevel/x"
keys = [x.key for x in bucket.objects.filter(Prefix=prefix)]
- keys.should.equal(["toplevel/x/key", "toplevel/x/y/key", "toplevel/x/y/z/key"])
+ assert keys == ["toplevel/x/key", "toplevel/x/y/key", "toplevel/x/y/z/key"]
keys = [x.key for x in bucket.objects.filter(Prefix=prefix, Delimiter=delimiter)]
- keys.should.equal([])
+ assert keys == []
@mock_s3
def test_key_with_reduced_redundancy():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "test_bucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
bucket.put_object(
@@ -528,32 +524,32 @@ def test_key_with_reduced_redundancy():
# we use the bucket iterator because of:
# https:/github.com/boto/boto/issues/1173
- [x.storage_class for x in bucket.objects.all()].should.equal(["REDUCED_REDUNDANCY"])
+ assert [x.storage_class for x in bucket.objects.all()] == ["REDUCED_REDUNDANCY"]
@freeze_time("2012-01-01 12:00:00")
@mock_s3
def test_restore_key():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
- key.restore.should.equal(None)
+ assert key.restore is None
key.restore_object(RestoreRequest={"Days": 1})
if settings.TEST_SERVER_MODE:
- key.restore.should.contain('ongoing-request="false"')
+ assert 'ongoing-request="false"' in key.restore
else:
- key.restore.should.equal(
+ assert key.restore == (
'ongoing-request="false", expiry-date="Mon, 02 Jan 2012 12:00:00 GMT"'
)
key.restore_object(RestoreRequest={"Days": 2})
if settings.TEST_SERVER_MODE:
- key.restore.should.contain('ongoing-request="false"')
+ assert 'ongoing-request="false"' in key.restore
else:
- key.restore.should.equal(
+ assert key.restore == (
'ongoing-request="false", expiry-date="Tue, 03 Jan 2012 12:00:00 GMT"'
)
@@ -568,32 +564,32 @@ def test_restore_key_transition():
model_name="s3::keyrestore", transition={"progression": "manual", "times": 1}
)
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
key = bucket.put_object(Key="the-key", Body=b"somedata", StorageClass="GLACIER")
- key.restore.should.equal(None)
+ assert key.restore is None
key.restore_object(RestoreRequest={"Days": 1})
# first call: there should be an ongoing request
- key.restore.should.contain('ongoing-request="true"')
+ assert 'ongoing-request="true"' in key.restore
# second call: request should be done
key.load()
- key.restore.should.contain('ongoing-request="false"')
+ assert 'ongoing-request="false"' in key.restore
# third call: request should still be done
key.load()
- key.restore.should.contain('ongoing-request="false"')
+ assert 'ongoing-request="false"' in key.restore
state_manager.unset_transition(model_name="s3::keyrestore")
@mock_s3
def test_cannot_restore_standard_class_object():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
key = bucket.put_object(Key="the-key", Body=b"somedata")
@@ -601,34 +597,34 @@ def test_cannot_restore_standard_class_object():
key.restore_object(RestoreRequest={"Days": 1})
err = err.value.response["Error"]
- err["Code"].should.equal("InvalidObjectState")
- err["StorageClass"].should.equal("STANDARD")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidObjectState"
+ assert err["StorageClass"] == "STANDARD"
+ assert err["Message"] == (
"The operation is not valid for the object's storage class"
)
@mock_s3
def test_get_versioning_status():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
- v = s3.BucketVersioning("foobar")
- v.status.should.equal(None)
+ version_info = s3_resource.BucketVersioning("foobar")
+ assert version_info.status is None
- v.enable()
- v.status.should.equal("Enabled")
+ version_info.enable()
+ assert version_info.status == "Enabled"
- v.suspend()
- v.status.should.equal("Suspended")
+ version_info.suspend()
+ assert version_info.status == "Suspended"
@mock_s3
def test_key_version():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
bucket.Versioning().enable()
@@ -638,17 +634,17 @@ def test_key_version():
versions.append(key.version_id)
key.put(Body=b"some string")
versions.append(key.version_id)
- set(versions).should.have.length_of(2)
+ assert len(set(versions)) == 2
key = client.get_object(Bucket="foobar", Key="the-key")
- key["VersionId"].should.equal(versions[-1])
+ assert key["VersionId"] == versions[-1]
@mock_s3
def test_list_versions():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
bucket.Versioning().enable()
@@ -658,39 +654,39 @@ def test_list_versions():
key_versions.append(key.version_id)
key = bucket.put_object(Key="the-key", Body=b"Version 2")
key_versions.append(key.version_id)
- key_versions.should.have.length_of(2)
+ assert len(key_versions) == 2
versions = client.list_object_versions(Bucket="foobar")["Versions"]
- versions.should.have.length_of(2)
+ assert len(versions) == 2
- versions[0]["Key"].should.equal("the-key")
- versions[0]["VersionId"].should.equal(key_versions[1])
+ assert versions[0]["Key"] == "the-key"
+ assert versions[0]["VersionId"] == key_versions[1]
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Body"].read().should.equal(b"Version 2")
+ assert resp["Body"].read() == b"Version 2"
resp = client.get_object(
Bucket="foobar", Key="the-key", VersionId=versions[0]["VersionId"]
)
- resp["Body"].read().should.equal(b"Version 2")
+ assert resp["Body"].read() == b"Version 2"
- versions[1]["Key"].should.equal("the-key")
- versions[1]["VersionId"].should.equal(key_versions[0])
+ assert versions[1]["Key"] == "the-key"
+ assert versions[1]["VersionId"] == key_versions[0]
resp = client.get_object(
Bucket="foobar", Key="the-key", VersionId=versions[1]["VersionId"]
)
- resp["Body"].read().should.equal(b"Version 1")
+ assert resp["Body"].read() == b"Version 1"
bucket.put_object(Key="the2-key", Body=b"Version 1")
- list(bucket.objects.all()).should.have.length_of(2)
+ assert len(list(bucket.objects.all())) == 2
versions = client.list_object_versions(Bucket="foobar", Prefix="the2")["Versions"]
- versions.should.have.length_of(1)
+ assert len(versions) == 1
@mock_s3
def test_acl_setting():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
content = b"imafile"
@@ -700,22 +696,20 @@ def test_acl_setting():
)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.should.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } in grants
@mock_s3
def test_acl_setting_via_headers():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
keyname = "test.txt"
@@ -724,22 +718,20 @@ def test_acl_setting_via_headers():
client.put_object_acl(ACL="public-read", Bucket="foobar", Key=keyname)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.should.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } in grants
@mock_s3
def test_acl_switching():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
keyname = "test.txt"
@@ -747,32 +739,30 @@ def test_acl_switching():
client.put_object_acl(ACL="private", Bucket="foobar", Key=keyname)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.shouldnt.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } not in grants
@mock_s3
def test_acl_switching_nonexistent_key():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- with pytest.raises(ClientError) as e:
- s3.put_object_acl(Bucket="mybucket", Key="nonexistent", ACL="private")
+ with pytest.raises(ClientError) as exc:
+ s3_client.put_object_acl(Bucket="mybucket", Key="nonexistent", ACL="private")
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
@mock_s3
def test_streaming_upload_from_file_to_presigned_url():
- s3 = boto3.resource("s3", region_name="us-east-1")
- bucket = s3.Bucket("test-bucket")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
+ bucket = s3_resource.Bucket("test-bucket")
bucket.create()
bucket.put_object(Body=b"ABCD", Key="file.txt")
@@ -780,28 +770,28 @@ def test_streaming_upload_from_file_to_presigned_url():
presigned_url = boto3.client("s3").generate_presigned_url(
"put_object", params, ExpiresIn=900
)
- with open(__file__, "rb") as f:
- response = requests.get(presigned_url, data=f)
+ with open(__file__, "rb") as fhandle:
+ response = requests.get(presigned_url, data=fhandle)
assert response.status_code == 200
@mock_s3
def test_upload_from_file_to_presigned_url():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
params = {"Bucket": "mybucket", "Key": "file_upload"}
presigned_url = boto3.client("s3").generate_presigned_url(
"put_object", params, ExpiresIn=900
)
- file = open("text.txt", "w")
+ file = open("text.txt", "w", encoding="utf-8")
file.write("test")
file.close()
files = {"upload_file": open("text.txt", "rb")}
requests.put(presigned_url, files=files)
- resp = s3.get_object(Bucket="mybucket", Key="file_upload")
+ resp = s3_client.get_object(Bucket="mybucket", Key="file_upload")
data = resp["Body"].read()
assert data == b"test"
# cleanup
@@ -810,10 +800,13 @@ def test_upload_from_file_to_presigned_url():
@mock_s3
def test_upload_file_with_checksum_algorithm():
- random_bytes = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x00\x00\x01\x00\x01\x00\x00\xff\n\xdb\x00C\x00\x08\x06\x06\x07\x06\x05\x08\x07\x07\n"
- with open("rb.tmp", mode="wb") as f:
- f.write(random_bytes)
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ random_bytes = (
+ b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x00\x00\x01\x00\x01\x00"
+ b"\x00\xff\n\xdb\x00C\x00\x08\x06\x06\x07\x06\x05\x08\x07\x07\n"
+ )
+ with open("rb.tmp", mode="wb") as fhandle:
+ fhandle.write(random_bytes)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket = "mybucket"
s3_client.create_bucket(Bucket=bucket)
@@ -822,7 +815,7 @@ def test_upload_file_with_checksum_algorithm():
)
os.remove("rb.tmp")
- actual_content = s3.Object(bucket, "my_key.csv").get()["Body"].read()
+ actual_content = s3_resource.Object(bucket, "my_key.csv").get()["Body"].read()
assert random_bytes == actual_content
@@ -833,15 +826,20 @@ def test_put_chunked_with_v4_signature_in_body():
content = "CONTENT"
content_bytes = bytes(content, encoding="utf8")
# 'CONTENT' as received in moto, when PutObject is called in java AWS SDK v2
- chunked_body = b"7;chunk-signature=bd479c607ec05dd9d570893f74eed76a4b333dfa37ad6446f631ec47dc52e756\r\nCONTENT\r\n0;chunk-signature=d192ec4075ddfc18d2ef4da4f55a87dc762ba4417b3bd41e70c282f8bec2ece0\r\n\r\n"
+ chunked_body = (
+ b"7;chunk-signature=bd479c607ec05dd9d570893f74eed76a4b333dfa37ad"
+ b"6446f631ec47dc52e756\r\nCONTENT\r\n"
+ b"0;chunk-signature=d192ec4075ddfc18d2ef4da4f55a87dc762ba4417b3b"
+ b"d41e70c282f8bec2ece0\r\n\r\n"
+ )
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
model = MyModel(file_name, content)
model.save()
- boto_etag = s3.get_object(Bucket=bucket_name, Key=file_name)["ETag"]
+ boto_etag = s3_client.get_object(Bucket=bucket_name, Key=file_name)["ETag"]
params = {"Bucket": bucket_name, "Key": file_name}
# We'll use manipulated presigned PUT, to mimick PUT from SDK
@@ -857,7 +855,7 @@ def test_put_chunked_with_v4_signature_in_body():
"x-amz-decoded-content-length": str(len(content_bytes)),
},
)
- resp = s3.get_object(Bucket=bucket_name, Key=file_name)
+ resp = s3_client.get_object(Bucket=bucket_name, Key=file_name)
body = resp["Body"].read()
assert body == content_bytes
@@ -867,8 +865,8 @@ def test_put_chunked_with_v4_signature_in_body():
@mock_s3
def test_s3_object_in_private_bucket():
- s3 = boto3.resource("s3")
- bucket = s3.Bucket("test-bucket")
+ s3_resource = boto3.resource("s3")
+ bucket = s3_resource.Bucket("test-bucket")
bucket.create(
ACL="private", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
@@ -879,7 +877,7 @@ def test_s3_object_in_private_bucket():
with pytest.raises(ClientError) as exc:
s3_anonymous.Object(key="file.txt", bucket_name="test-bucket").get()
- exc.value.response["Error"]["Code"].should.equal("403")
+ assert exc.value.response["Error"]["Code"] == "403"
bucket.put_object(ACL="public-read", Body=b"ABCD", Key="file.txt")
contents = (
@@ -887,45 +885,45 @@ def test_s3_object_in_private_bucket():
.get()["Body"]
.read()
)
- contents.should.equal(b"ABCD")
+ assert contents == b"ABCD"
@mock_s3
def test_unicode_key():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("mybucket")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("mybucket")
bucket.create()
key = bucket.put_object(Key="こんにちは.jpg", Body=b"Hello world!")
- [listed_key.key for listed_key in bucket.objects.all()].should.equal([key.key])
- fetched_key = s3.Object("mybucket", key.key)
- fetched_key.key.should.equal(key.key)
- fetched_key.get()["Body"].read().decode("utf-8").should.equal("Hello world!")
+ assert [listed_key.key for listed_key in bucket.objects.all()] == [key.key]
+ fetched_key = s3_resource.Object("mybucket", key.key)
+ assert fetched_key.key == key.key
+ assert fetched_key.get()["Body"].read().decode("utf-8") == "Hello world!"
@mock_s3
def test_unicode_value():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("mybucket")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("mybucket")
bucket.create()
bucket.put_object(Key="some_key", Body="こんにちは.jpg")
- key = s3.Object("mybucket", "some_key")
- key.get()["Body"].read().decode("utf-8").should.equal("こんにちは.jpg")
+ key = s3_resource.Object("mybucket", "some_key")
+ assert key.get()["Body"].read().decode("utf-8") == "こんにちは.jpg"
@mock_s3
def test_setting_content_encoding():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("mybucket")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("mybucket")
bucket.create()
bucket.put_object(Body=b"abcdef", ContentEncoding="gzip", Key="keyname")
- key = s3.Object("mybucket", "keyname")
- key.content_encoding.should.equal("gzip")
+ key = s3_resource.Object("mybucket", "keyname")
+ assert key.content_encoding == "gzip"
@mock_s3
@@ -934,7 +932,7 @@ def test_bucket_location_default():
bucket_name = "mybucket"
# No LocationConstraint ==> us-east-1
cli.create_bucket(Bucket=bucket_name)
- cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(None)
+ assert cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"] is None
@mock_s3
@@ -946,71 +944,73 @@ def test_bucket_location_nondefault():
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
)
- cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(
- "eu-central-1"
+ assert (
+ cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"]
+ == "eu-central-1"
)
@mock_s3
def test_s3_location_should_error_outside_useast1():
- s3 = boto3.client("s3", region_name="eu-west-1")
+ s3_client = boto3.client("s3", region_name="eu-west-1")
bucket_name = "asdfasdfsdfdsfasda"
- with pytest.raises(ClientError) as e:
- s3.create_bucket(Bucket=bucket_name)
- e.value.response["Error"]["Message"].should.equal(
- "The unspecified location constraint is incompatible for the region specific endpoint this request was sent to."
+ with pytest.raises(ClientError) as exc:
+ s3_client.create_bucket(Bucket=bucket_name)
+ assert exc.value.response["Error"]["Message"] == (
+ "The unspecified location constraint is incompatible for the "
+ "region specific endpoint this request was sent to."
)
@mock_s3
def test_ranged_get():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("mybucket")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.Bucket("mybucket")
bucket.create()
rep = b"0123456789"
key = bucket.put_object(Key="bigkey", Body=rep * 10)
# Implicitly bounded range requests.
- key.get(Range="bytes=0-")["Body"].read().should.equal(rep * 10)
- key.get(Range="bytes=50-")["Body"].read().should.equal(rep * 5)
- key.get(Range="bytes=99-")["Body"].read().should.equal(b"9")
+ assert key.get(Range="bytes=0-")["Body"].read() == rep * 10
+ assert key.get(Range="bytes=50-")["Body"].read() == rep * 5
+ assert key.get(Range="bytes=99-")["Body"].read() == b"9"
# Explicitly bounded range requests starting from the first byte.
- key.get(Range="bytes=0-0")["Body"].read().should.equal(b"0")
- key.get(Range="bytes=0-49")["Body"].read().should.equal(rep * 5)
- key.get(Range="bytes=0-99")["Body"].read().should.equal(rep * 10)
- key.get(Range="bytes=0-100")["Body"].read().should.equal(rep * 10)
- key.get(Range="bytes=0-700")["Body"].read().should.equal(rep * 10)
+ assert key.get(Range="bytes=0-0")["Body"].read() == b"0"
+ assert key.get(Range="bytes=0-49")["Body"].read() == rep * 5
+ assert key.get(Range="bytes=0-99")["Body"].read() == rep * 10
+ assert key.get(Range="bytes=0-100")["Body"].read() == rep * 10
+ assert key.get(Range="bytes=0-700")["Body"].read() == rep * 10
# Explicitly bounded range requests starting from the / a middle byte.
- key.get(Range="bytes=50-54")["Body"].read().should.equal(rep[:5])
- key.get(Range="bytes=50-99")["Body"].read().should.equal(rep * 5)
- key.get(Range="bytes=50-100")["Body"].read().should.equal(rep * 5)
- key.get(Range="bytes=50-700")["Body"].read().should.equal(rep * 5)
+ assert key.get(Range="bytes=50-54")["Body"].read() == rep[:5]
+ assert key.get(Range="bytes=50-99")["Body"].read() == rep * 5
+ assert key.get(Range="bytes=50-100")["Body"].read() == rep * 5
+ assert key.get(Range="bytes=50-700")["Body"].read() == rep * 5
# Explicitly bounded range requests starting from the last byte.
- key.get(Range="bytes=99-99")["Body"].read().should.equal(b"9")
- key.get(Range="bytes=99-100")["Body"].read().should.equal(b"9")
- key.get(Range="bytes=99-700")["Body"].read().should.equal(b"9")
+ assert key.get(Range="bytes=99-99")["Body"].read() == b"9"
+ assert key.get(Range="bytes=99-100")["Body"].read() == b"9"
+ assert key.get(Range="bytes=99-700")["Body"].read() == b"9"
# Suffix range requests.
- key.get(Range="bytes=-1")["Body"].read().should.equal(b"9")
- key.get(Range="bytes=-60")["Body"].read().should.equal(rep * 6)
- key.get(Range="bytes=-100")["Body"].read().should.equal(rep * 10)
- key.get(Range="bytes=-101")["Body"].read().should.equal(rep * 10)
- key.get(Range="bytes=-700")["Body"].read().should.equal(rep * 10)
+ assert key.get(Range="bytes=-1")["Body"].read() == b"9"
+ assert key.get(Range="bytes=-60")["Body"].read() == rep * 6
+ assert key.get(Range="bytes=-100")["Body"].read() == rep * 10
+ assert key.get(Range="bytes=-101")["Body"].read() == rep * 10
+ assert key.get(Range="bytes=-700")["Body"].read() == rep * 10
- key.content_length.should.equal(100)
+ assert key.content_length == 100
@mock_s3
def test_policy():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
policy = json.dumps(
@@ -1036,28 +1036,26 @@ def test_policy():
with pytest.raises(ClientError) as ex:
client.get_bucket_policy(Bucket=bucket_name)
- ex.value.response["Error"]["Code"].should.equal("NoSuchBucketPolicy")
- ex.value.response["Error"]["Message"].should.equal(
- "The bucket policy does not exist"
- )
+ assert ex.value.response["Error"]["Code"] == "NoSuchBucketPolicy"
+ assert ex.value.response["Error"]["Message"] == "The bucket policy does not exist"
client.put_bucket_policy(Bucket=bucket_name, Policy=policy)
- client.get_bucket_policy(Bucket=bucket_name)["Policy"].should.equal(policy)
+ assert client.get_bucket_policy(Bucket=bucket_name)["Policy"] == policy
client.delete_bucket_policy(Bucket=bucket_name)
with pytest.raises(ClientError) as ex:
client.get_bucket_policy(Bucket=bucket_name)
- ex.value.response["Error"]["Code"].should.equal("NoSuchBucketPolicy")
+ assert ex.value.response["Error"]["Code"] == "NoSuchBucketPolicy"
@mock_s3
def test_website_configuration_xml():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
client.put_bucket_website(
@@ -1072,76 +1070,76 @@ def test_website_configuration_xml():
],
},
)
- c = client.get_bucket_website(Bucket=bucket_name)
- c.should.have.key("IndexDocument").equals({"Suffix": "index.html"})
- c.should.have.key("RoutingRules")
- c["RoutingRules"].should.have.length_of(1)
- rule = c["RoutingRules"][0]
- rule.should.have.key("Condition").equals({"KeyPrefixEquals": "test/testing"})
- rule.should.have.key("Redirect").equals({"ReplaceKeyWith": "test.txt"})
+ site_info = client.get_bucket_website(Bucket=bucket_name)
+ assert site_info["IndexDocument"] == {"Suffix": "index.html"}
+ assert "RoutingRules" in site_info
+ assert len(site_info["RoutingRules"]) == 1
+ rule = site_info["RoutingRules"][0]
+ assert rule["Condition"] == {"KeyPrefixEquals": "test/testing"}
+ assert rule["Redirect"] == {"ReplaceKeyWith": "test.txt"}
- c.shouldnt.have.key("RedirectAllRequestsTo")
- c.shouldnt.have.key("ErrorDocument")
+ assert "RedirectAllRequestsTo" not in site_info
+ assert "ErrorDocument" not in site_info
@mock_s3
def test_client_get_object_returns_etag():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
- resp = s3.get_object(Bucket="mybucket", Key="steve")
- resp["ETag"].should.equal('"d32bda93738f7e03adb22e66c90fbc04"')
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
+ resp = s3_client.get_object(Bucket="mybucket", Key="steve")
+ assert resp["ETag"] == '"d32bda93738f7e03adb22e66c90fbc04"'
@mock_s3
def test_website_redirect_location():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
- resp = s3.get_object(Bucket="mybucket", Key="steve")
- resp.get("WebsiteRedirectLocation").should.equal(None)
+ s3_client.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
+ resp = s3_client.get_object(Bucket="mybucket", Key="steve")
+ assert resp.get("WebsiteRedirectLocation") is None
url = "https://github.com/getmoto/moto"
- s3.put_object(
+ s3_client.put_object(
Bucket="mybucket", Key="steve", Body=b"is awesome", WebsiteRedirectLocation=url
)
- resp = s3.get_object(Bucket="mybucket", Key="steve")
- resp["WebsiteRedirectLocation"].should.equal(url)
+ resp = s3_client.get_object(Bucket="mybucket", Key="steve")
+ assert resp["WebsiteRedirectLocation"] == url
@mock_s3
def test_delimiter_optional_in_response():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=1)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="one", Body=b"1")
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=1)
assert resp.get("Delimiter") is None
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Delimiter="/")
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=1, Delimiter="/")
assert resp.get("Delimiter") == "/"
@mock_s3
def test_list_objects_with_pagesize_0():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=0)
- resp["Name"].should.equal("mybucket")
- resp["MaxKeys"].should.equal(0)
- resp["IsTruncated"].should.equal(False)
- resp.shouldnt.have.key("Contents")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=0)
+ assert resp["Name"] == "mybucket"
+ assert resp["MaxKeys"] == 0
+ assert resp["IsTruncated"] is False
+ assert "Contents" not in resp
@mock_s3
def test_list_objects_truncated_response():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
- s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
- s3.put_object(Bucket="mybucket", Key="three", Body=b"333")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="one", Body=b"1")
+ s3_client.put_object(Bucket="mybucket", Key="two", Body=b"22")
+ s3_client.put_object(Bucket="mybucket", Key="three", Body=b"333")
# First list
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=1)
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=1)
listed_object = resp["Contents"][0]
assert listed_object["Key"] == "one"
@@ -1154,7 +1152,7 @@ def test_list_objects_truncated_response():
next_marker = resp["NextMarker"]
# Second list
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
listed_object = resp["Contents"][0]
assert listed_object["Key"] == "three"
@@ -1167,7 +1165,7 @@ def test_list_objects_truncated_response():
next_marker = resp["NextMarker"]
# Third list
- resp = s3.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
+ resp = s3_client.list_objects(Bucket="mybucket", MaxKeys=1, Marker=next_marker)
listed_object = resp["Contents"][0]
assert listed_object["Key"] == "two"
@@ -1180,12 +1178,12 @@ def test_list_objects_truncated_response():
@mock_s3
def test_list_keys_xml_escaped():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
key_name = "Q&A.txt"
- s3.put_object(Bucket="mybucket", Key=key_name, Body=b"is awesome")
+ s3_client.put_object(Bucket="mybucket", Key=key_name, Body=b"is awesome")
- resp = s3.list_objects_v2(Bucket="mybucket", Prefix=key_name)
+ resp = s3_client.list_objects_v2(Bucket="mybucket", Prefix=key_name)
assert resp["Contents"][0]["Key"] == key_name
assert resp["KeyCount"] == 1
@@ -1200,13 +1198,13 @@ def test_list_keys_xml_escaped():
@mock_s3
def test_list_objects_v2_common_prefix_pagination():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
max_keys = 1
keys = [f"test/{i}/{i}" for i in range(3)]
for key in keys:
- s3.put_object(Bucket="mybucket", Key=key, Body=b"v")
+ s3_client.put_object(Bucket="mybucket", Key=key, Body=b"v")
prefixes = []
args = {
@@ -1219,7 +1217,7 @@ def test_list_objects_v2_common_prefix_pagination():
while resp.get("IsTruncated", False):
if "NextContinuationToken" in resp:
args["ContinuationToken"] = resp["NextContinuationToken"]
- resp = s3.list_objects_v2(**args)
+ resp = s3_client.list_objects_v2(**args)
if "CommonPrefixes" in resp:
assert len(resp["CommonPrefixes"]) == max_keys
prefixes.extend(i["Prefix"] for i in resp["CommonPrefixes"])
@@ -1229,13 +1227,13 @@ def test_list_objects_v2_common_prefix_pagination():
@mock_s3
def test_list_objects_v2_common_invalid_continuation_token():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
max_keys = 1
keys = [f"test/{i}/{i}" for i in range(3)]
for key in keys:
- s3.put_object(Bucket="mybucket", Key=key, Body=b"v")
+ s3_client.put_object(Bucket="mybucket", Key=key, Body=b"v")
args = {
"Bucket": "mybucket",
@@ -1246,23 +1244,23 @@ def test_list_objects_v2_common_invalid_continuation_token():
}
with pytest.raises(botocore.exceptions.ClientError) as exc:
- s3.list_objects_v2(**args)
- exc.value.response["Error"]["Code"].should.equal("InvalidArgument")
- exc.value.response["Error"]["Message"].should.equal(
+ s3_client.list_objects_v2(**args)
+ assert exc.value.response["Error"]["Code"] == "InvalidArgument"
+ assert exc.value.response["Error"]["Message"] == (
"The continuation token provided is incorrect"
)
@mock_s3
def test_list_objects_v2_truncated_response():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
- s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
- s3.put_object(Bucket="mybucket", Key="three", Body=b"333")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="one", Body=b"1")
+ s3_client.put_object(Bucket="mybucket", Key="two", Body=b"22")
+ s3_client.put_object(Bucket="mybucket", Key="three", Body=b"333")
# First list
- resp = s3.list_objects_v2(Bucket="mybucket", MaxKeys=1)
+ resp = s3_client.list_objects_v2(Bucket="mybucket", MaxKeys=1)
listed_object = resp["Contents"][0]
assert listed_object["Key"] == "one"
@@ -1277,7 +1275,7 @@ def test_list_objects_v2_truncated_response():
next_token = resp["NextContinuationToken"]
# Second list
- resp = s3.list_objects_v2(
+ resp = s3_client.list_objects_v2(
Bucket="mybucket", MaxKeys=1, ContinuationToken=next_token
)
listed_object = resp["Contents"][0]
@@ -1294,7 +1292,7 @@ def test_list_objects_v2_truncated_response():
next_token = resp["NextContinuationToken"]
# Third list
- resp = s3.list_objects_v2(
+ resp = s3_client.list_objects_v2(
Bucket="mybucket", MaxKeys=1, ContinuationToken=next_token
)
listed_object = resp["Contents"][0]
@@ -1312,14 +1310,14 @@ def test_list_objects_v2_truncated_response():
@mock_s3
def test_list_objects_v2_truncated_response_start_after():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
- s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
- s3.put_object(Bucket="mybucket", Key="three", Body=b"333")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="one", Body=b"1")
+ s3_client.put_object(Bucket="mybucket", Key="two", Body=b"22")
+ s3_client.put_object(Bucket="mybucket", Key="three", Body=b"333")
# First list
- resp = s3.list_objects_v2(Bucket="mybucket", MaxKeys=1, StartAfter="one")
+ resp = s3_client.list_objects_v2(Bucket="mybucket", MaxKeys=1, StartAfter="one")
listed_object = resp["Contents"][0]
assert listed_object["Key"] == "three"
@@ -1335,7 +1333,7 @@ def test_list_objects_v2_truncated_response_start_after():
# Second list
# The ContinuationToken must take precedence over StartAfter.
- resp = s3.list_objects_v2(
+ resp = s3_client.list_objects_v2(
Bucket="mybucket", MaxKeys=1, StartAfter="one", ContinuationToken=next_token
)
listed_object = resp["Contents"][0]
@@ -1354,11 +1352,11 @@ def test_list_objects_v2_truncated_response_start_after():
@mock_s3
def test_list_objects_v2_fetch_owner():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="one", Body=b"11")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="one", Body=b"11")
- resp = s3.list_objects_v2(Bucket="mybucket", FetchOwner=True)
+ resp = s3_client.list_objects_v2(Bucket="mybucket", FetchOwner=True)
owner = resp["Contents"][0]["Owner"]
assert "ID" in owner
@@ -1368,14 +1366,16 @@ def test_list_objects_v2_fetch_owner():
@mock_s3
def test_list_objects_v2_truncate_combined_keys_and_folders():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- s3.put_object(Bucket="mybucket", Key="1/2", Body="")
- s3.put_object(Bucket="mybucket", Key="2", Body="")
- s3.put_object(Bucket="mybucket", Key="3/4", Body="")
- s3.put_object(Bucket="mybucket", Key="4", Body="")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ s3_client.put_object(Bucket="mybucket", Key="1/2", Body="")
+ s3_client.put_object(Bucket="mybucket", Key="2", Body="")
+ s3_client.put_object(Bucket="mybucket", Key="3/4", Body="")
+ s3_client.put_object(Bucket="mybucket", Key="4", Body="")
- resp = s3.list_objects_v2(Bucket="mybucket", Prefix="", MaxKeys=2, Delimiter="/")
+ resp = s3_client.list_objects_v2(
+ Bucket="mybucket", Prefix="", MaxKeys=2, Delimiter="/"
+ )
assert "Delimiter" in resp
assert resp["IsTruncated"] is True
assert resp["KeyCount"] == 2
@@ -1385,7 +1385,7 @@ def test_list_objects_v2_truncate_combined_keys_and_folders():
assert resp["CommonPrefixes"][0]["Prefix"] == "1/"
last_tail = resp["NextContinuationToken"]
- resp = s3.list_objects_v2(
+ resp = s3_client.list_objects_v2(
Bucket="mybucket", MaxKeys=2, Prefix="", Delimiter="/", StartAfter=last_tail
)
assert resp["KeyCount"] == 2
@@ -1398,172 +1398,174 @@ def test_list_objects_v2_truncate_combined_keys_and_folders():
@mock_s3
def test_list_objects_v2_checksum_algo():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
- resp = s3.put_object(Bucket="mybucket", Key="0", Body="a")
- resp.should_not.have.key("ChecksumCRC32")
- resp["ResponseMetadata"]["HTTPHeaders"].should_not.have.key(
- "x-amz-sdk-checksum-algorithm"
- )
- resp = s3.put_object(
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
+ resp = s3_client.put_object(Bucket="mybucket", Key="0", Body="a")
+ assert "ChecksumCRC32" not in resp
+ assert "x-amz-sdk-checksum-algorithm" not in resp["ResponseMetadata"]["HTTPHeaders"]
+ resp = s3_client.put_object(
Bucket="mybucket", Key="1", Body="a", ChecksumAlgorithm="CRC32"
)
- resp.should.have.key("ChecksumCRC32")
- resp["ResponseMetadata"]["HTTPHeaders"][
- "x-amz-sdk-checksum-algorithm"
- ].should.equal("CRC32")
- resp = s3.put_object(
+ assert "ChecksumCRC32" in resp
+ assert (
+ resp["ResponseMetadata"]["HTTPHeaders"]["x-amz-sdk-checksum-algorithm"]
+ == "CRC32"
+ )
+ resp = s3_client.put_object(
Bucket="mybucket", Key="2", Body="b", ChecksumAlgorithm="SHA256"
)
- resp.should.have.key("ChecksumSHA256")
- resp["ResponseMetadata"]["HTTPHeaders"][
- "x-amz-sdk-checksum-algorithm"
- ].should.equal("SHA256")
+ assert "ChecksumSHA256" in resp
+ assert (
+ resp["ResponseMetadata"]["HTTPHeaders"]["x-amz-sdk-checksum-algorithm"]
+ == "SHA256"
+ )
- resp = s3.list_objects_v2(Bucket="mybucket")["Contents"]
- resp[0].should_not.have.key("ChecksumAlgorithm")
- resp[1].should.have.key("ChecksumAlgorithm").equals(["CRC32"])
- resp[2].should.have.key("ChecksumAlgorithm").equals(["SHA256"])
+ resp = s3_client.list_objects_v2(Bucket="mybucket")["Contents"]
+ assert "ChecksumAlgorithm" not in resp[0]
+ assert resp[1]["ChecksumAlgorithm"] == ["CRC32"]
+ assert resp[2]["ChecksumAlgorithm"] == ["SHA256"]
@mock_s3
def test_bucket_create():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="blah")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket="blah")
- s3.Object("blah", "hello.txt").put(Body="some text")
+ s3_resource.Object("blah", "hello.txt").put(Body="some text")
- s3.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8").should.equal(
- "some text"
+ assert (
+ s3_resource.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8")
+ == "some text"
)
@mock_s3
def test_bucket_create_force_us_east_1():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as exc:
- s3.create_bucket(
+ s3_resource.create_bucket(
Bucket="blah",
CreateBucketConfiguration={"LocationConstraint": DEFAULT_REGION_NAME},
)
- exc.value.response["Error"]["Code"].should.equal("InvalidLocationConstraint")
+ assert exc.value.response["Error"]["Code"] == "InvalidLocationConstraint"
@mock_s3
def test_bucket_create_eu_central():
- s3 = boto3.resource("s3", region_name="eu-central-1")
- s3.create_bucket(
+ s3_resource = boto3.resource("s3", region_name="eu-central-1")
+ s3_resource.create_bucket(
Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-central-1"}
)
- s3.Object("blah", "hello.txt").put(Body="some text")
+ s3_resource.Object("blah", "hello.txt").put(Body="some text")
- s3.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8").should.equal(
- "some text"
+ assert (
+ s3_resource.Object("blah", "hello.txt").get()["Body"].read().decode("utf-8")
+ == "some text"
)
@mock_s3
def test_bucket_create_empty_bucket_configuration_should_return_malformed_xml_error():
- s3 = boto3.resource("s3", region_name="us-east-1")
- with pytest.raises(ClientError) as e:
- s3.create_bucket(Bucket="whatever", CreateBucketConfiguration={})
- e.value.response["Error"]["Code"].should.equal("MalformedXML")
- e.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
+ with pytest.raises(ClientError) as exc:
+ s3_resource.create_bucket(Bucket="whatever", CreateBucketConfiguration={})
+ assert exc.value.response["Error"]["Code"] == "MalformedXML"
+ assert exc.value.response["ResponseMetadata"]["HTTPStatusCode"] == 400
@mock_s3
def test_head_object():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="blah")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket="blah")
- s3.Object("blah", "hello.txt").put(Body="some text")
+ s3_resource.Object("blah", "hello.txt").put(Body="some text")
- s3.Object("blah", "hello.txt").meta.client.head_object(
+ s3_resource.Object("blah", "hello.txt").meta.client.head_object(
Bucket="blah", Key="hello.txt"
)
- with pytest.raises(ClientError) as e:
- s3.Object("blah", "hello2.txt").meta.client.head_object(
+ with pytest.raises(ClientError) as exc:
+ s3_resource.Object("blah", "hello2.txt").meta.client.head_object(
Bucket="blah", Key="hello_bad.txt"
)
- e.value.response["Error"]["Code"].should.equal("404")
+ assert exc.value.response["Error"]["Code"] == "404"
@mock_s3
def test_get_object():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="blah")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket="blah")
- s3.Object("blah", "hello.txt").put(Body="some text")
+ s3_resource.Object("blah", "hello.txt").put(Body="some text")
- s3.Object("blah", "hello.txt").meta.client.head_object(
+ s3_resource.Object("blah", "hello.txt").meta.client.head_object(
Bucket="blah", Key="hello.txt"
)
- with pytest.raises(ClientError) as e:
- s3.Object("blah", "hello2.txt").get()
+ with pytest.raises(ClientError) as exc:
+ s3_resource.Object("blah", "hello2.txt").get()
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
@mock_s3
def test_s3_content_type():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- my_bucket = s3.Bucket("my-cool-bucket")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ my_bucket = s3_resource.Bucket("my-cool-bucket")
my_bucket.create()
s3_path = "test_s3.py"
- s3 = boto3.resource("s3", verify=False)
+ s3_resource = boto3.resource("s3", verify=False)
content_type = "text/python-x"
- s3.Object(my_bucket.name, s3_path).put(
+ s3_resource.Object(my_bucket.name, s3_path).put(
ContentType=content_type, Body=b"some python code"
)
- s3.Object(my_bucket.name, s3_path).content_type.should.equal(content_type)
+ assert s3_resource.Object(my_bucket.name, s3_path).content_type == content_type
@mock_s3
def test_get_missing_object_with_part_number():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="blah")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket="blah")
- with pytest.raises(ClientError) as e:
- s3.Object("blah", "hello.txt").meta.client.head_object(
+ with pytest.raises(ClientError) as exc:
+ s3_resource.Object("blah", "hello.txt").meta.client.head_object(
Bucket="blah", Key="hello.txt", PartNumber=123
)
- e.value.response["Error"]["Code"].should.equal("404")
+ assert exc.value.response["Error"]["Code"] == "404"
@mock_s3
def test_head_object_with_versioning():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.create_bucket(Bucket="blah")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ bucket = s3_resource.create_bucket(Bucket="blah")
bucket.Versioning().enable()
old_content = "some text"
new_content = "some new text"
- s3.Object("blah", "hello.txt").put(Body=old_content)
- s3.Object("blah", "hello.txt").put(Body=new_content)
+ s3_resource.Object("blah", "hello.txt").put(Body=old_content)
+ s3_resource.Object("blah", "hello.txt").put(Body=new_content)
- versions = list(s3.Bucket("blah").object_versions.all())
+ versions = list(s3_resource.Bucket("blah").object_versions.all())
latest = list(filter(lambda item: item.is_latest, versions))[0]
oldest = list(filter(lambda item: not item.is_latest, versions))[0]
- head_object = s3.Object("blah", "hello.txt").meta.client.head_object(
+ head_object = s3_resource.Object("blah", "hello.txt").meta.client.head_object(
Bucket="blah", Key="hello.txt"
)
- head_object["VersionId"].should.equal(latest.id)
- head_object["ContentLength"].should.equal(len(new_content))
+ assert head_object["VersionId"] == latest.id
+ assert head_object["ContentLength"] == len(new_content)
- old_head_object = s3.Object("blah", "hello.txt").meta.client.head_object(
+ old_head_object = s3_resource.Object("blah", "hello.txt").meta.client.head_object(
Bucket="blah", Key="hello.txt", VersionId=oldest.id
)
- old_head_object["VersionId"].should.equal(oldest.id)
- old_head_object["ContentLength"].should.equal(len(old_content))
+ assert old_head_object["VersionId"] == oldest.id
+ assert old_head_object["ContentLength"] == len(old_content)
- old_head_object["VersionId"].should_not.equal(head_object["VersionId"])
+ assert old_head_object["VersionId"] != head_object["VersionId"]
@mock_s3
@@ -1685,201 +1687,207 @@ def test_delete_versioned_bucket_returns_metadata():
@mock_s3
def test_get_object_if_modified_since_refresh():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
- response = s3.get_object(Bucket=bucket_name, Key=key)
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.get_object(
+ s3_client.get_object(
Bucket=bucket_name,
Key=key,
IfModifiedSince=response["LastModified"],
)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_get_object_if_modified_since():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.get_object(
+ s3_client.get_object(
Bucket=bucket_name,
Key=key,
IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_get_object_if_unmodified_since():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.get_object(
+ s3_client.get_object(
Bucket=bucket_name,
Key=key,
IfUnmodifiedSince=datetime.datetime.utcnow() - datetime.timedelta(hours=1),
)
- e = err.value
- e.response["Error"]["Code"].should.equal("PreconditionFailed")
- e.response["Error"]["Condition"].should.equal("If-Unmodified-Since")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "PreconditionFailed"
+ assert err_value.response["Error"]["Condition"] == "If-Unmodified-Since"
@mock_s3
def test_get_object_if_match():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.get_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
- e = err.value
- e.response["Error"]["Code"].should.equal("PreconditionFailed")
- e.response["Error"]["Condition"].should.equal("If-Match")
+ s3_client.get_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "PreconditionFailed"
+ assert err_value.response["Error"]["Condition"] == "If-Match"
@mock_s3
def test_get_object_if_none_match():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- etag = s3.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]
+ etag = s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.get_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ s3_client.get_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_head_object_if_modified_since():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.head_object(
+ s3_client.head_object(
Bucket=bucket_name,
Key=key,
IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_head_object_if_modified_since_refresh():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
- response = s3.head_object(Bucket=bucket_name, Key=key)
+ response = s3_client.head_object(Bucket=bucket_name, Key=key)
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.head_object(
+ s3_client.head_object(
Bucket=bucket_name,
Key=key,
IfModifiedSince=response["LastModified"],
)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_head_object_if_unmodified_since():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.head_object(
+ s3_client.head_object(
Bucket=bucket_name,
Key=key,
IfUnmodifiedSince=datetime.datetime.utcnow() - datetime.timedelta(hours=1),
)
- e = err.value
- e.response["Error"].should.equal({"Code": "412", "Message": "Precondition Failed"})
+ err_value = err.value
+ assert err_value.response["Error"] == {
+ "Code": "412",
+ "Message": "Precondition Failed",
+ }
@mock_s3
def test_head_object_if_match():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.head_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
- e = err.value
- e.response["Error"].should.equal({"Code": "412", "Message": "Precondition Failed"})
+ s3_client.head_object(Bucket=bucket_name, Key=key, IfMatch='"hello"')
+ err_value = err.value
+ assert err_value.response["Error"] == {
+ "Code": "412",
+ "Message": "Precondition Failed",
+ }
@mock_s3
def test_head_object_if_none_match():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
key = "hello.txt"
- etag = s3.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]
+ etag = s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")["ETag"]
with pytest.raises(botocore.exceptions.ClientError) as err:
- s3.head_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
- e = err.value
- e.response["Error"].should.equal({"Code": "304", "Message": "Not Modified"})
+ s3_client.head_object(Bucket=bucket_name, Key=key, IfNoneMatch=etag)
+ err_value = err.value
+ assert err_value.response["Error"] == {"Code": "304", "Message": "Not Modified"}
@mock_s3
def test_put_bucket_cors():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- resp = s3.put_bucket_cors(
+ resp = s3_client.put_bucket_cors(
Bucket=bucket_name,
CORSConfiguration={
"CORSRules": [
@@ -1901,10 +1909,10 @@ def test_put_bucket_cors():
},
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
with pytest.raises(ClientError) as err:
- s3.put_bucket_cors(
+ s3_client.put_bucket_cors(
Bucket=bucket_name,
CORSConfiguration={
"CORSRules": [
@@ -1912,42 +1920,47 @@ def test_put_bucket_cors():
]
},
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidRequest")
- e.response["Error"]["Message"].should.equal(
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidRequest"
+ assert err_value.response["Error"]["Message"] == (
"Found unsupported HTTP method in CORS config. " "Unsupported method is NOTREAL"
)
with pytest.raises(ClientError) as err:
- s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={"CORSRules": []})
- e = err.value
- e.response["Error"]["Code"].should.equal("MalformedXML")
+ s3_client.put_bucket_cors(
+ Bucket=bucket_name, CORSConfiguration={"CORSRules": []}
+ )
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "MalformedXML"
    # And 101 rules (over the limit) should be rejected as MalformedXML:
many_rules = [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}] * 101
with pytest.raises(ClientError) as err:
- s3.put_bucket_cors(
+ s3_client.put_bucket_cors(
Bucket=bucket_name, CORSConfiguration={"CORSRules": many_rules}
)
- e = err.value
- e.response["Error"]["Code"].should.equal("MalformedXML")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "MalformedXML"
@mock_s3
def test_get_bucket_cors():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
# Without CORS:
with pytest.raises(ClientError) as err:
- s3.get_bucket_cors(Bucket=bucket_name)
+ s3_client.get_bucket_cors(Bucket=bucket_name)
- e = err.value
- e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
- e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "NoSuchCORSConfiguration"
+ assert (
+ err_value.response["Error"]["Message"]
+ == "The CORS configuration does not exist"
+ )
- s3.put_bucket_cors(
+ s3_client.put_bucket_cors(
Bucket=bucket_name,
CORSConfiguration={
"CORSRules": [
@@ -1969,48 +1982,51 @@ def test_get_bucket_cors():
},
)
- resp = s3.get_bucket_cors(Bucket=bucket_name)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- len(resp["CORSRules"]).should.equal(2)
+ resp = s3_client.get_bucket_cors(Bucket=bucket_name)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
+ assert len(resp["CORSRules"]) == 2
@mock_s3
def test_delete_bucket_cors():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_cors(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_cors(
Bucket=bucket_name,
CORSConfiguration={
"CORSRules": [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}]
},
)
- resp = s3.delete_bucket_cors(Bucket=bucket_name)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)
+ resp = s3_client.delete_bucket_cors(Bucket=bucket_name)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204
# Verify deletion:
with pytest.raises(ClientError) as err:
- s3.get_bucket_cors(Bucket=bucket_name)
+ s3_client.get_bucket_cors(Bucket=bucket_name)
- e = err.value
- e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
- e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "NoSuchCORSConfiguration"
+ assert (
+ err_value.response["Error"]["Message"]
+ == "The CORS configuration does not exist"
+ )
@mock_s3
def test_put_bucket_notification():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="bucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="bucket")
# With no configuration:
- result = s3.get_bucket_notification(Bucket="bucket")
+ result = s3_client.get_bucket_notification(Bucket="bucket")
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
# Place proper topic configuration:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"TopicConfigurations": [
@@ -2035,7 +2051,7 @@ def test_put_bucket_notification():
)
# Verify to completion:
- result = s3.get_bucket_notification_configuration(Bucket="bucket")
+ result = s3_client.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["TopicConfigurations"]) == 2
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
@@ -2074,7 +2090,7 @@ def test_put_bucket_notification():
)
# Place proper queue configuration:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
@@ -2089,7 +2105,7 @@ def test_put_bucket_notification():
]
},
)
- result = s3.get_bucket_notification_configuration(Bucket="bucket")
+ result = s3_client.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["QueueConfigurations"]) == 1
assert not result.get("TopicConfigurations")
assert not result.get("LambdaFunctionConfigurations")
@@ -2111,7 +2127,7 @@ def test_put_bucket_notification():
)
# Place proper Lambda configuration:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"LambdaFunctionConfigurations": [
@@ -2125,7 +2141,7 @@ def test_put_bucket_notification():
]
},
)
- result = s3.get_bucket_notification_configuration(Bucket="bucket")
+ result = s3_client.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["LambdaFunctionConfigurations"]) == 1
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
@@ -2156,7 +2172,7 @@ def test_put_bucket_notification():
)
# And with all 3 set:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"TopicConfigurations": [
@@ -2179,16 +2195,16 @@ def test_put_bucket_notification():
],
},
)
- result = s3.get_bucket_notification_configuration(Bucket="bucket")
+ result = s3_client.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["LambdaFunctionConfigurations"]) == 1
assert len(result["TopicConfigurations"]) == 1
assert len(result["QueueConfigurations"]) == 1
# And clear it out:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket", NotificationConfiguration={}
)
- result = s3.get_bucket_notification_configuration(Bucket="bucket")
+ result = s3_client.get_bucket_notification_configuration(Bucket="bucket")
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
@@ -2196,13 +2212,13 @@ def test_put_bucket_notification():
@mock_s3
def test_put_bucket_notification_errors():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="bucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="bucket")
# With incorrect ARNs:
for tech in ["Queue", "Topic", "LambdaFunction"]:
with pytest.raises(ClientError) as err:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
f"{tech}Configurations": [
@@ -2219,7 +2235,7 @@ def test_put_bucket_notification_errors():
# Region not the same as the bucket:
with pytest.raises(ClientError) as err:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
@@ -2232,14 +2248,14 @@ def test_put_bucket_notification_errors():
)
assert err.value.response["Error"]["Code"] == "InvalidArgument"
- assert (
- err.value.response["Error"]["Message"]
- == "The notification destination service region is not valid for the bucket location constraint"
+ assert err.value.response["Error"]["Message"] == (
+ "The notification destination service region is not valid for "
+ "the bucket location constraint"
)
# Invalid event name:
with pytest.raises(ClientError) as err:
- s3.put_bucket_notification_configuration(
+ s3_client.put_bucket_notification_configuration(
Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
@@ -2259,152 +2275,144 @@ def test_put_bucket_notification_errors():
@mock_s3
def test_list_object_versions():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "000" + str(uuid4())
key = "key-with-versions"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
items = (b"v1", b"v2")
for body in items:
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
- response = s3.list_object_versions(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
# Two object versions should be returned
- len(response["Versions"]).should.equal(2)
- keys = set([item["Key"] for item in response["Versions"]])
- keys.should.equal({key})
+ assert len(response["Versions"]) == 2
+ keys = {item["Key"] for item in response["Versions"]}
+ assert keys == {key}
# the first item in the list should be the latest
- response["Versions"][0]["IsLatest"].should.equal(True)
+ assert response["Versions"][0]["IsLatest"] is True
# Test latest object version is returned
- response = s3.get_object(Bucket=bucket_name, Key=key)
- response["Body"].read().should.equal(items[-1])
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert response["Body"].read() == items[-1]
@mock_s3
def test_list_object_versions_with_delimiter():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "000" + str(uuid4())
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
for key_index in list(range(1, 5)) + list(range(10, 14)):
for version_index in range(1, 4):
body = f"data-{version_index}".encode("UTF-8")
- s3.put_object(
+ s3_client.put_object(
Bucket=bucket_name, Key=f"key{key_index}-with-data", Body=body
)
- s3.put_object(
+ s3_client.put_object(
Bucket=bucket_name, Key=f"key{key_index}-without-data", Body=b""
)
- response = s3.list_object_versions(Bucket=bucket_name)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
# All object versions should be returned
- len(response["Versions"]).should.equal(
- 48
- ) # 8 keys * 2 (one with, one without) * 3 versions per key
+ # 8 keys * 2 (one with, one without) * 3 versions per key
+ assert len(response["Versions"]) == 48
# Use start of key as delimiter
- response = s3.list_object_versions(Bucket=bucket_name, Delimiter="key1")
- response.should.have.key("CommonPrefixes").equal([{"Prefix": "key1"}])
- response.should.have.key("Delimiter").equal("key1")
+ response = s3_client.list_object_versions(Bucket=bucket_name, Delimiter="key1")
+ assert response["CommonPrefixes"] == [{"Prefix": "key1"}]
+ assert response["Delimiter"] == "key1"
    # 3 keys that do not contain the phrase 'key1' (key2, key3, key4) * 2 * 3
- response.should.have.key("Versions").length_of(18)
+ assert len(response["Versions"]) == 18
# Use in-between key as delimiter
- response = s3.list_object_versions(Bucket=bucket_name, Delimiter="-with-")
- response.should.have.key("CommonPrefixes").equal(
- [
- {"Prefix": "key1-with-"},
- {"Prefix": "key10-with-"},
- {"Prefix": "key11-with-"},
- {"Prefix": "key12-with-"},
- {"Prefix": "key13-with-"},
- {"Prefix": "key2-with-"},
- {"Prefix": "key3-with-"},
- {"Prefix": "key4-with-"},
- ]
- )
- response.should.have.key("Delimiter").equal("-with-")
+ response = s3_client.list_object_versions(Bucket=bucket_name, Delimiter="-with-")
+ assert response["CommonPrefixes"] == [
+ {"Prefix": "key1-with-"},
+ {"Prefix": "key10-with-"},
+ {"Prefix": "key11-with-"},
+ {"Prefix": "key12-with-"},
+ {"Prefix": "key13-with-"},
+ {"Prefix": "key2-with-"},
+ {"Prefix": "key3-with-"},
+ {"Prefix": "key4-with-"},
+ ]
+
+ assert response["Delimiter"] == "-with-"
# key(1/10/11/12/13)-without, key(2/3/4)-without
- response.should.have.key("Versions").length_of(8 * 1 * 3)
+ assert len(response["Versions"]) == (8 * 1 * 3)
# Use in-between key as delimiter
- response = s3.list_object_versions(Bucket=bucket_name, Delimiter="1-with-")
- response.should.have.key("CommonPrefixes").equal(
+ response = s3_client.list_object_versions(Bucket=bucket_name, Delimiter="1-with-")
+ assert response["CommonPrefixes"] == (
[{"Prefix": "key1-with-"}, {"Prefix": "key11-with-"}]
)
- response.should.have.key("Delimiter").equal("1-with-")
- response.should.have.key("Versions").length_of(42)
- all_keys = set([v["Key"] for v in response["Versions"]])
- all_keys.should.contain("key1-without-data")
- all_keys.shouldnt.contain("key1-with-data")
- all_keys.should.contain("key4-with-data")
- all_keys.should.contain("key4-without-data")
+ assert response["Delimiter"] == "1-with-"
+ assert len(response["Versions"]) == 42
+ all_keys = {v["Key"] for v in response["Versions"]}
+ assert "key1-without-data" in all_keys
+ assert "key1-with-data" not in all_keys
+ assert "key4-with-data" in all_keys
+ assert "key4-without-data" in all_keys
# Use in-between key as delimiter + prefix
- response = s3.list_object_versions(
+ response = s3_client.list_object_versions(
Bucket=bucket_name, Prefix="key1", Delimiter="with-"
)
- response.should.have.key("CommonPrefixes").equal(
- [
- {"Prefix": "key1-with-"},
- {"Prefix": "key10-with-"},
- {"Prefix": "key11-with-"},
- {"Prefix": "key12-with-"},
- {"Prefix": "key13-with-"},
- ]
- )
- response.should.have.key("Delimiter").equal("with-")
- response.should.have.key("KeyMarker").equal("")
- response.shouldnt.have.key("NextKeyMarker")
- response.should.have.key("Versions").length_of(15)
- all_keys = set([v["Key"] for v in response["Versions"]])
- all_keys.should.equal(
- {
- "key1-without-data",
- "key10-without-data",
- "key11-without-data",
- "key13-without-data",
- "key12-without-data",
- }
- )
+ assert response["CommonPrefixes"] == [
+ {"Prefix": "key1-with-"},
+ {"Prefix": "key10-with-"},
+ {"Prefix": "key11-with-"},
+ {"Prefix": "key12-with-"},
+ {"Prefix": "key13-with-"},
+ ]
+ assert response["Delimiter"] == "with-"
+ assert response["KeyMarker"] == ""
+ assert "NextKeyMarker" not in response
+ assert len(response["Versions"]) == 15
+ all_keys = {v["Key"] for v in response["Versions"]}
+ assert all_keys == {
+ "key1-without-data",
+ "key10-without-data",
+ "key11-without-data",
+ "key13-without-data",
+ "key12-without-data",
+ }
# Start at KeyMarker, and filter using Prefix+Delimiter for all subsequent keys
- response = s3.list_object_versions(
+ response = s3_client.list_object_versions(
Bucket=bucket_name, Prefix="key1", Delimiter="with-", KeyMarker="key11"
)
- response.should.have.key("CommonPrefixes").equal(
- [
- {"Prefix": "key11-with-"},
- {"Prefix": "key12-with-"},
- {"Prefix": "key13-with-"},
- ]
- )
- response.should.have.key("Delimiter").equal("with-")
- response.should.have.key("KeyMarker").equal("key11")
- response.shouldnt.have.key("NextKeyMarker")
- response.should.have.key("Versions").length_of(9)
- all_keys = set([v["Key"] for v in response["Versions"]])
- all_keys.should.equal(
+ assert response["CommonPrefixes"] == [
+ {"Prefix": "key11-with-"},
+ {"Prefix": "key12-with-"},
+ {"Prefix": "key13-with-"},
+ ]
+ assert response["Delimiter"] == "with-"
+ assert response["KeyMarker"] == "key11"
+ assert "NextKeyMarker" not in response
+ assert len(response["Versions"]) == 9
+ all_keys = {v["Key"] for v in response["Versions"]}
+ assert all_keys == (
{"key11-without-data", "key12-without-data", "key13-without-data"}
)
# Delimiter with Prefix being the entire key
- response = s3.list_object_versions(
+ response = s3_client.list_object_versions(
Bucket=bucket_name, Prefix="key1-with-data", Delimiter="-"
)
- response.should.have.key("Versions").length_of(3)
- response.shouldnt.have.key("CommonPrefixes")
+ assert len(response["Versions"]) == 3
+ assert "CommonPrefixes" not in response
# Delimiter without prefix
- response = s3.list_object_versions(Bucket=bucket_name, Delimiter="-with-")
- response["CommonPrefixes"].should.have.length_of(8)
- response["CommonPrefixes"].should.contain({"Prefix": "key1-with-"})
+ response = s3_client.list_object_versions(Bucket=bucket_name, Delimiter="-with-")
+ assert len(response["CommonPrefixes"]) == 8
+ assert {"Prefix": "key1-with-"} in response["CommonPrefixes"]
# Should return all keys -without-data
- response.should.have.key("Versions").length_of(24)
+ assert len(response["Versions"]) == 24
@mock_s3
@@ -2447,176 +2455,176 @@ def test_list_object_versions_with_delimiter_for_deleted_objects():
# Verify we only retrieve the DeleteMarkers that have this prefix
objs = client.list_object_versions(Bucket=bucket_name)
- [dm["Key"] for dm in objs["DeleteMarkers"]].should.equal(["del_obj_0", "del_obj_1"])
+ assert [dm["Key"] for dm in objs["DeleteMarkers"]] == ["del_obj_0", "del_obj_1"]
hist_objs = client.list_object_versions(Bucket=bucket_name, Prefix="hist_obj")
- hist_objs.shouldnt.have.key("DeleteMarkers")
+ assert "DeleteMarkers" not in hist_objs
del_objs = client.list_object_versions(Bucket=bucket_name, Prefix="del_obj_0")
- [dm["Key"] for dm in del_objs["DeleteMarkers"]].should.equal(["del_obj_0"])
+ assert [dm["Key"] for dm in del_objs["DeleteMarkers"]] == ["del_obj_0"]
@mock_s3
def test_list_object_versions_with_versioning_disabled():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
items = (b"v1", b"v2")
for body in items:
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
- response = s3.list_object_versions(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
# One object version should be returned
- len(response["Versions"]).should.equal(1)
- response["Versions"][0]["Key"].should.equal(key)
+ assert len(response["Versions"]) == 1
+ assert response["Versions"][0]["Key"] == key
# The version id should be the string null
- response["Versions"][0]["VersionId"].should.equal("null")
+ assert response["Versions"][0]["VersionId"] == "null"
# Test latest object version is returned
- response = s3.get_object(Bucket=bucket_name, Key=key)
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
assert "VersionId" not in response["ResponseMetadata"]["HTTPHeaders"]
- response["Body"].read().should.equal(items[-1])
+ assert response["Body"].read() == items[-1]
@mock_s3
def test_list_object_versions_with_versioning_enabled_late():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
items = (b"v1", b"v2")
- s3.put_object(Bucket=bucket_name, Key=key, Body=b"v1")
- s3.put_bucket_versioning(
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=b"v1")
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
- s3.put_object(Bucket=bucket_name, Key=key, Body=b"v2")
- response = s3.list_object_versions(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=b"v2")
+ response = s3_client.list_object_versions(Bucket=bucket_name)
# Two object versions should be returned
- len(response["Versions"]).should.equal(2)
- keys = set([item["Key"] for item in response["Versions"]])
- keys.should.equal({key})
+ assert len(response["Versions"]) == 2
+ keys = {item["Key"] for item in response["Versions"]}
+ assert keys == {key}
# There should still be a null version id.
- versionsId = set([item["VersionId"] for item in response["Versions"]])
- versionsId.should.contain("null")
+ versions_id = {item["VersionId"] for item in response["Versions"]}
+ assert "null" in versions_id
# Test latest object version is returned
- response = s3.get_object(Bucket=bucket_name, Key=key)
- response["Body"].read().should.equal(items[-1])
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert response["Body"].read() == items[-1]
@mock_s3
def test_bad_prefix_list_object_versions():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
bad_prefix = "key-that-does-not-exist"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
items = (b"v1", b"v2")
for body in items:
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
- response = s3.list_object_versions(Bucket=bucket_name, Prefix=bad_prefix)
- response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- response.should_not.contain("Versions")
- response.should_not.contain("DeleteMarkers")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
+ response = s3_client.list_object_versions(Bucket=bucket_name, Prefix=bad_prefix)
+ assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
+ assert "Versions" not in response
+ assert "DeleteMarkers" not in response
@mock_s3
def test_delete_markers():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions-and-unicode-ó"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
items = (b"v1", b"v2")
for body in items:
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
- s3.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})
+ s3_client.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})
- with pytest.raises(ClientError) as e:
- s3.get_object(Bucket=bucket_name, Key=key)
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ with pytest.raises(ClientError) as exc:
+ s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
- response = s3.list_object_versions(Bucket=bucket_name)
- response["Versions"].should.have.length_of(2)
- response["DeleteMarkers"].should.have.length_of(1)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
+ assert len(response["Versions"]) == 2
+ assert len(response["DeleteMarkers"]) == 1
- s3.delete_object(
+ s3_client.delete_object(
Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][0]["VersionId"]
)
- response = s3.get_object(Bucket=bucket_name, Key=key)
- response["Body"].read().should.equal(items[-1])
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert response["Body"].read() == items[-1]
- response = s3.list_object_versions(Bucket=bucket_name)
- response["Versions"].should.have.length_of(2)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
+ assert len(response["Versions"]) == 2
# We've asserted there is only 2 records so one is newest, one is oldest
latest = list(filter(lambda item: item["IsLatest"], response["Versions"]))[0]
oldest = list(filter(lambda item: not item["IsLatest"], response["Versions"]))[0]
# Double check ordering of version ID's
- latest["VersionId"].should_not.equal(oldest["VersionId"])
+ assert latest["VersionId"] != oldest["VersionId"]
# Double check the name is still unicode
- latest["Key"].should.equal("key-with-versions-and-unicode-ó")
- oldest["Key"].should.equal("key-with-versions-and-unicode-ó")
+ assert latest["Key"] == "key-with-versions-and-unicode-ó"
+ assert oldest["Key"] == "key-with-versions-and-unicode-ó"
@mock_s3
def test_multiple_delete_markers():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions-and-unicode-ó"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
items = (b"v1", b"v2")
for body in items:
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
# Delete the object twice to add multiple delete markers
- s3.delete_object(Bucket=bucket_name, Key=key)
- s3.delete_object(Bucket=bucket_name, Key=key)
+ s3_client.delete_object(Bucket=bucket_name, Key=key)
+ s3_client.delete_object(Bucket=bucket_name, Key=key)
- response = s3.list_object_versions(Bucket=bucket_name)
- response["DeleteMarkers"].should.have.length_of(2)
+ response = s3_client.list_object_versions(Bucket=bucket_name)
+ assert len(response["DeleteMarkers"]) == 2
- with pytest.raises(ClientError) as e:
- s3.get_object(Bucket=bucket_name, Key=key)
- e.response["Error"]["Code"].should.equal("404")
+ with pytest.raises(ClientError) as exc:
+ s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
# Remove both delete markers to restore the object
- s3.delete_object(
+ s3_client.delete_object(
Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][0]["VersionId"]
)
- s3.delete_object(
+ s3_client.delete_object(
Bucket=bucket_name, Key=key, VersionId=response["DeleteMarkers"][1]["VersionId"]
)
- response = s3.get_object(Bucket=bucket_name, Key=key)
- response["Body"].read().should.equal(items[-1])
- response = s3.list_object_versions(Bucket=bucket_name)
- response["Versions"].should.have.length_of(2)
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert response["Body"].read() == items[-1]
+ response = s3_client.list_object_versions(Bucket=bucket_name)
+ assert len(response["Versions"]) == 2
# We've asserted there is only 2 records so one is newest, one is oldest
latest = list(filter(lambda item: item["IsLatest"], response["Versions"]))[0]
oldest = list(filter(lambda item: not item["IsLatest"], response["Versions"]))[0]
# Double check ordering of version ID's
- latest["VersionId"].should_not.equal(oldest["VersionId"])
+ assert latest["VersionId"] != oldest["VersionId"]
# Double check the name is still unicode
- latest["Key"].should.equal("key-with-versions-and-unicode-ó")
- oldest["Key"].should.equal("key-with-versions-and-unicode-ó")
+ assert latest["Key"] == "key-with-versions-and-unicode-ó"
+ assert oldest["Key"] == "key-with-versions-and-unicode-ó"
@mock_s3
@@ -2626,8 +2634,8 @@ def test_get_stream_gzipped():
s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3_client.create_bucket(Bucket="moto-tests")
buffer_ = BytesIO()
- with GzipFile(fileobj=buffer_, mode="w") as f:
- f.write(payload)
+ with GzipFile(fileobj=buffer_, mode="w") as fhandle:
+ fhandle.write(payload)
payload_gz = buffer_.getvalue()
s3_client.put_object(
@@ -2661,116 +2669,110 @@ TEST_XML = """\
@mock_s3
def test_bucket_name_too_long():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as exc:
- s3.create_bucket(Bucket="x" * 64)
- exc.value.response["Error"]["Code"].should.equal("InvalidBucketName")
+ s3_client.create_bucket(Bucket="x" * 64)
+ assert exc.value.response["Error"]["Code"] == "InvalidBucketName"
@mock_s3
def test_bucket_name_too_short():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as exc:
- s3.create_bucket(Bucket="x" * 2)
- exc.value.response["Error"]["Code"].should.equal("InvalidBucketName")
+ s3_client.create_bucket(Bucket="x" * 2)
+ assert exc.value.response["Error"]["Code"] == "InvalidBucketName"
@mock_s3
def test_accelerated_none_when_unspecified():
bucket_name = "some_bucket"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
- resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
- resp.shouldnt.have.key("Status")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
+ resp = s3_client.get_bucket_accelerate_configuration(Bucket=bucket_name)
+ assert "Status" not in resp
@mock_s3
def test_can_enable_bucket_acceleration():
bucket_name = "some_bucket"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
- resp = s3.put_bucket_accelerate_configuration(
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
+ resp = s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
)
- resp.keys().should.have.length_of(
- 1
- ) # Response contains nothing (only HTTP headers)
- resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
- resp.should.have.key("Status")
- resp["Status"].should.equal("Enabled")
+ assert len(resp.keys()) == 1 # Response contains nothing (only HTTP headers)
+ resp = s3_client.get_bucket_accelerate_configuration(Bucket=bucket_name)
+ assert "Status" in resp
+ assert resp["Status"] == "Enabled"
@mock_s3
def test_can_suspend_bucket_acceleration():
bucket_name = "some_bucket"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
- resp = s3.put_bucket_accelerate_configuration(
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
+ resp = s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
)
- resp = s3.put_bucket_accelerate_configuration(
+ resp = s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"}
)
- resp.keys().should.have.length_of(
- 1
- ) # Response contains nothing (only HTTP headers)
- resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
- resp.should.have.key("Status")
- resp["Status"].should.equal("Suspended")
+ assert len(resp.keys()) == 1 # Response contains nothing (only HTTP headers)
+ resp = s3_client.get_bucket_accelerate_configuration(Bucket=bucket_name)
+ assert "Status" in resp
+ assert resp["Status"] == "Suspended"
@mock_s3
def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
bucket_name = "some_bucket"
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": "us-west-1"},
)
- resp = s3.put_bucket_accelerate_configuration(
+ resp = s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"}
)
- resp.keys().should.have.length_of(
- 1
- ) # Response contains nothing (only HTTP headers)
- resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
- resp.shouldnt.have.key("Status")
+ assert len(resp.keys()) == 1 # Response contains nothing (only HTTP headers)
+ resp = s3_client.get_bucket_accelerate_configuration(Bucket=bucket_name)
+ assert "Status" not in resp
@mock_s3
def test_accelerate_configuration_status_validation():
bucket_name = "some_bucket"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
with pytest.raises(ClientError) as exc:
- s3.put_bucket_accelerate_configuration(
+ s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "bad_status"}
)
- exc.value.response["Error"]["Code"].should.equal("MalformedXML")
+ assert exc.value.response["Error"]["Code"] == "MalformedXML"
@mock_s3
def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
bucket_name = "some.bucket.with.dots"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
with pytest.raises(ClientError) as exc:
- s3.put_bucket_accelerate_configuration(
+ s3_client.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
)
- exc.value.response["Error"]["Code"].should.equal("InvalidRequest")
+ assert exc.value.response["Error"]["Code"] == "InvalidRequest"
def store_and_read_back_a_key(key):
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
body = b"Some body"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
- response = s3.get_object(Bucket=bucket_name, Key=key)
- response["Body"].read().should.equal(body)
+ response = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert response["Body"].read() == body
@mock_s3
@@ -2789,22 +2791,22 @@ def test_root_dir_with_empty_name_works():
@mock_s3
def test_leading_slashes_not_removed(bucket_name):
"""Make sure that leading slashes are not removed internally."""
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
uploaded_key = "/key"
invalid_key_1 = "key"
invalid_key_2 = "//key"
- s3.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body")
+ s3_client.put_object(Bucket=bucket_name, Key=uploaded_key, Body=b"Some body")
- with pytest.raises(ClientError) as e:
- s3.get_object(Bucket=bucket_name, Key=invalid_key_1)
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ with pytest.raises(ClientError) as exc:
+ s3_client.get_object(Bucket=bucket_name, Key=invalid_key_1)
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
- with pytest.raises(ClientError) as e:
- s3.get_object(Bucket=bucket_name, Key=invalid_key_2)
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ with pytest.raises(ClientError) as exc:
+ s3_client.get_object(Bucket=bucket_name, Key=invalid_key_2)
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
@pytest.mark.parametrize(
@@ -2812,27 +2814,27 @@ def test_leading_slashes_not_removed(bucket_name):
)
@mock_s3
def test_delete_objects_with_url_encoded_key(key):
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
body = b"Some body"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
def put_object():
- s3.put_object(Bucket=bucket_name, Key=key, Body=body)
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body=body)
def assert_deleted():
- with pytest.raises(ClientError) as e:
- s3.get_object(Bucket=bucket_name, Key=key)
+ with pytest.raises(ClientError) as exc:
+ s3_client.get_object(Bucket=bucket_name, Key=key)
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
put_object()
- s3.delete_object(Bucket=bucket_name, Key=key)
+ s3_client.delete_object(Bucket=bucket_name, Key=key)
assert_deleted()
put_object()
- s3.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})
+ s3_client.delete_objects(Bucket=bucket_name, Delete={"Objects": [{"Key": key}]})
assert_deleted()
@@ -2843,12 +2845,12 @@ def test_delete_objects_unknown_key():
client.create_bucket(Bucket=bucket_name)
client.put_object(Bucket=bucket_name, Key="file1", Body="body")
- s = client.delete_objects(
+ objs = client.delete_objects(
Bucket=bucket_name, Delete={"Objects": [{"Key": "file1"}, {"Key": "file2"}]}
)
- s["Deleted"].should.have.length_of(2)
- s["Deleted"].should.contain({"Key": "file1"})
- s["Deleted"].should.contain({"Key": "file2"})
+ assert len(objs["Deleted"]) == 2
+ assert {"Key": "file1"} in objs["Deleted"]
+ assert {"Key": "file2"} in objs["Deleted"]
client.delete_bucket(Bucket=bucket_name)
@@ -2859,15 +2861,15 @@ def test_public_access_block():
client.create_bucket(Bucket="mybucket")
# Try to get the public access block (should not exist by default)
- with pytest.raises(ClientError) as ce:
+ with pytest.raises(ClientError) as exc:
client.get_public_access_block(Bucket="mybucket")
- assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
+ assert exc.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
assert (
- ce.value.response["Error"]["Message"]
+ exc.value.response["Error"]["Message"]
== "The public access block configuration was not found"
)
- assert ce.value.response["ResponseMetadata"]["HTTPStatusCode"] == 404
+ assert exc.value.response["ResponseMetadata"]["HTTPStatusCode"] == 404
# Put a public block in place:
test_map = {
@@ -2877,7 +2879,7 @@ def test_public_access_block():
"RestrictPublicBuckets": False,
}
- for field in test_map.keys():
+ for field in test_map:
# Toggle:
test_map[field] = True
@@ -2907,17 +2909,17 @@ def test_public_access_block():
}
# Test with a blank PublicAccessBlockConfiguration:
- with pytest.raises(ClientError) as ce:
+ with pytest.raises(ClientError) as exc:
client.put_public_access_block(
Bucket="mybucket", PublicAccessBlockConfiguration={}
)
- assert ce.value.response["Error"]["Code"] == "InvalidRequest"
+ assert exc.value.response["Error"]["Code"] == "InvalidRequest"
assert (
- ce.value.response["Error"]["Message"]
+ exc.value.response["Error"]["Message"]
== "Must specify at least one configuration."
)
- assert ce.value.response["ResponseMetadata"]["HTTPStatusCode"] == 400
+ assert exc.value.response["ResponseMetadata"]["HTTPStatusCode"] == 400
# Test that things work with AWS Config:
config_client = boto3.client("config", region_name=DEFAULT_REGION_NAME)
@@ -2940,16 +2942,16 @@ def test_public_access_block():
# Delete:
client.delete_public_access_block(Bucket="mybucket")
- with pytest.raises(ClientError) as ce:
+ with pytest.raises(ClientError) as exc:
client.get_public_access_block(Bucket="mybucket")
- assert ce.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
+ assert exc.value.response["Error"]["Code"] == "NoSuchPublicAccessBlockConfiguration"
@mock_s3
def test_creating_presigned_post():
bucket = "presigned-test"
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket=bucket)
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket=bucket)
success_url = "http://localhost/completed"
fdata = b"test data\n"
file_uid = uuid.uuid4()
@@ -2961,7 +2963,7 @@ def test_creating_presigned_post():
conditions.append(["content-length-range", 1, 30])
real_key = f"{file_uid}.txt"
- data = s3.generate_presigned_post(
+ data = s3_client.generate_presigned_post(
Bucket=bucket,
Key=real_key,
Fields={
@@ -2983,7 +2985,7 @@ def test_creating_presigned_post():
assert args["key"][0] == real_key
assert args["bucket"][0] == bucket
- assert s3.get_object(Bucket=bucket, Key=real_key)["Body"].read() == fdata
+ assert s3_client.get_object(Bucket=bucket, Key=real_key)["Body"].read() == fdata
@mock_s3
@@ -2994,47 +2996,49 @@ def test_presigned_put_url_with_approved_headers():
expected_contenttype = "app/sth"
conn = boto3.resource("s3", region_name="us-east-1")
conn.create_bucket(Bucket=bucket)
- s3 = boto3.client("s3", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
# Create a pre-signed url with some metadata.
- url = s3.generate_presigned_url(
+ url = s3_client.generate_presigned_url(
ClientMethod="put_object",
Params={"Bucket": bucket, "Key": key, "ContentType": expected_contenttype},
)
# Verify S3 throws an error when the header is not provided
response = requests.put(url, data=content)
- response.status_code.should.equal(403)
- str(response.content).should.contain("SignatureDoesNotMatch")
- str(response.content).should.contain(
- "The request signature we calculated does not match the signature you provided. Check your key and signing method."
- )
+ assert response.status_code == 403
+ assert "SignatureDoesNotMatch" in str(response.content)
+ assert (
+ "The request signature we calculated does not match the "
+ "signature you provided. Check your key and signing method."
+ ) in str(response.content)
# Verify S3 throws an error when the header has the wrong value
response = requests.put(
url, data=content, headers={"Content-Type": "application/unknown"}
)
- response.status_code.should.equal(403)
- str(response.content).should.contain("SignatureDoesNotMatch")
- str(response.content).should.contain(
- "The request signature we calculated does not match the signature you provided. Check your key and signing method."
- )
+ assert response.status_code == 403
+ assert "SignatureDoesNotMatch" in str(response.content)
+ assert (
+ "The request signature we calculated does not match the "
+ "signature you provided. Check your key and signing method."
+ ) in str(response.content)
# Verify S3 uploads correctly when providing the meta data
response = requests.put(
url, data=content, headers={"Content-Type": expected_contenttype}
)
- response.status_code.should.equal(200)
+ assert response.status_code == 200
# Assert the object exists
- obj = s3.get_object(Bucket=bucket, Key=key)
- obj["ContentType"].should.equal(expected_contenttype)
- obj["ContentLength"].should.equal(11)
- obj["Body"].read().should.equal(content)
- obj["Metadata"].should.equal({})
+ obj = s3_client.get_object(Bucket=bucket, Key=key)
+ assert obj["ContentType"] == expected_contenttype
+ assert obj["ContentLength"] == 11
+ assert obj["Body"].read() == content
+ assert obj["Metadata"] == {}
- s3.delete_object(Bucket=bucket, Key=key)
- s3.delete_bucket(Bucket=bucket)
+ s3_client.delete_object(Bucket=bucket, Key=key)
+ s3_client.delete_bucket(Bucket=bucket)
@mock_s3
@@ -3044,77 +3048,77 @@ def test_presigned_put_url_with_custom_headers():
content = b"filecontent"
conn = boto3.resource("s3", region_name="us-east-1")
conn.create_bucket(Bucket=bucket)
- s3 = boto3.client("s3", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
# Create a pre-signed url with some metadata.
- url = s3.generate_presigned_url(
+ url = s3_client.generate_presigned_url(
ClientMethod="put_object",
Params={"Bucket": bucket, "Key": key, "Metadata": {"venue": "123"}},
)
# Verify S3 uploads correctly when providing the meta data
response = requests.put(url, data=content)
- response.status_code.should.equal(200)
+ assert response.status_code == 200
# Assert the object exists
- obj = s3.get_object(Bucket=bucket, Key=key)
- obj["ContentLength"].should.equal(11)
- obj["Body"].read().should.equal(content)
- obj["Metadata"].should.equal({"venue": "123"})
+ obj = s3_client.get_object(Bucket=bucket, Key=key)
+ assert obj["ContentLength"] == 11
+ assert obj["Body"].read() == content
+ assert obj["Metadata"] == {"venue": "123"}
- s3.delete_object(Bucket=bucket, Key=key)
- s3.delete_bucket(Bucket=bucket)
+ s3_client.delete_object(Bucket=bucket, Key=key)
+ s3_client.delete_bucket(Bucket=bucket)
@mock_s3
def test_request_partial_content_should_contain_content_length():
bucket = "bucket"
object_key = "key"
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket)
- s3.Object(bucket, object_key).put(Body="some text")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket=bucket)
+ s3_resource.Object(bucket, object_key).put(Body="some text")
- file = s3.Object(bucket, object_key)
+ file = s3_resource.Object(bucket, object_key)
response = file.get(Range="bytes=0-1024")
- response["ContentLength"].should.equal(9)
+ assert response["ContentLength"] == 9
@mock_s3
def test_request_partial_content_should_contain_actual_content_length():
bucket = "bucket"
object_key = "key"
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket)
- s3.Object(bucket, object_key).put(Body="some text")
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket=bucket)
+ s3_resource.Object(bucket, object_key).put(Body="some text")
- file = s3.Object(bucket, object_key)
+ file = s3_resource.Object(bucket, object_key)
requested_range = "bytes=1024-"
try:
file.get(Range=requested_range)
- except botocore.client.ClientError as e:
- e.response["Error"]["Code"].should.equal("InvalidRange")
- e.response["Error"]["Message"].should.equal(
- "The requested range is not satisfiable"
+ except botocore.client.ClientError as exc:
+ assert exc.response["Error"]["Code"] == "InvalidRange"
+ assert (
+ exc.response["Error"]["Message"] == "The requested range is not satisfiable"
)
- e.response["Error"]["ActualObjectSize"].should.equal("9")
- e.response["Error"]["RangeRequested"].should.equal(requested_range)
+ assert exc.response["Error"]["ActualObjectSize"] == "9"
+ assert exc.response["Error"]["RangeRequested"] == requested_range
@mock_s3
def test_get_unknown_version_should_throw_specific_error():
bucket_name = "my_bucket"
object_key = "hello.txt"
- s3 = boto3.resource("s3", region_name="us-east-1")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
client = boto3.client("s3", region_name="us-east-1")
- bucket = s3.create_bucket(Bucket=bucket_name)
+ bucket = s3_resource.create_bucket(Bucket=bucket_name)
bucket.Versioning().enable()
content = "some text"
- s3.Object(bucket_name, object_key).put(Body=content)
+ s3_resource.Object(bucket_name, object_key).put(Body=content)
- with pytest.raises(ClientError) as e:
+ with pytest.raises(ClientError) as exc:
client.get_object(Bucket=bucket_name, Key=object_key, VersionId="unknown")
- e.value.response["Error"]["Code"].should.equal("NoSuchVersion")
- e.value.response["Error"]["Message"].should.equal(
+ assert exc.value.response["Error"]["Code"] == "NoSuchVersion"
+ assert exc.value.response["Error"]["Message"] == (
"The specified version does not exist."
)
@@ -3123,22 +3127,22 @@ def test_get_unknown_version_should_throw_specific_error():
def test_request_partial_content_without_specifying_range_should_return_full_object():
bucket = "bucket"
object_key = "key"
- s3 = boto3.resource("s3", region_name="us-east-1")
- s3.create_bucket(Bucket=bucket)
- s3.Object(bucket, object_key).put(Body="some text that goes a long way")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
+ s3_resource.create_bucket(Bucket=bucket)
+ s3_resource.Object(bucket, object_key).put(Body="some text that goes a long way")
- file = s3.Object(bucket, object_key)
+ file = s3_resource.Object(bucket, object_key)
response = file.get(Range="")
- response["ContentLength"].should.equal(30)
+ assert response["ContentLength"] == 30
@mock_s3
def test_object_headers():
bucket = "my-bucket"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket)
- res = s3.put_object(
+ res = s3_client.put_object(
Bucket=bucket,
Body=b"test",
Key="file.txt",
@@ -3146,16 +3150,16 @@ def test_object_headers():
SSEKMSKeyId="test",
BucketKeyEnabled=True,
)
- res.should.have.key("ETag")
- res.should.have.key("ServerSideEncryption")
- res.should.have.key("SSEKMSKeyId")
- res.should.have.key("BucketKeyEnabled")
+ assert "ETag" in res
+ assert "ServerSideEncryption" in res
+ assert "SSEKMSKeyId" in res
+ assert "BucketKeyEnabled" in res
- res = s3.get_object(Bucket=bucket, Key="file.txt")
- res.should.have.key("ETag")
- res.should.have.key("ServerSideEncryption")
- res.should.have.key("SSEKMSKeyId")
- res.should.have.key("BucketKeyEnabled")
+ res = s3_client.get_object(Bucket=bucket, Key="file.txt")
+ assert "ETag" in res
+ assert "ServerSideEncryption" in res
+ assert "SSEKMSKeyId" in res
+ assert "BucketKeyEnabled" in res
if settings.TEST_SERVER_MODE:
@@ -3163,13 +3167,13 @@ if settings.TEST_SERVER_MODE:
@mock_s3
def test_upload_data_without_content_type():
bucket = "mybucket"
- s3 = boto3.client("s3")
- s3.create_bucket(Bucket=bucket)
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(Bucket=bucket)
data_input = b"some data 123 321"
req = requests.put("http://localhost:5000/mybucket/test.txt", data=data_input)
- req.status_code.should.equal(200)
+ assert req.status_code == 200
- res = s3.get_object(Bucket=bucket, Key="test.txt")
+ res = s3_client.get_object(Bucket=bucket, Key="test.txt")
data = res["Body"].read()
assert data == data_input
@@ -3191,8 +3195,8 @@ def test_get_object_versions_with_prefix(prefix):
s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=f"{prefix}.txt")
versions = s3_client.list_object_versions(Bucket=bucket_name, Prefix=prefix)
- versions["Versions"].should.have.length_of(3)
- versions["Prefix"].should.equal(prefix)
+ assert len(versions["Versions"]) == 3
+ assert versions["Prefix"] == prefix
@mock_s3
@@ -3214,11 +3218,11 @@ def test_create_bucket_duplicate():
CreateBucketConfiguration={"LocationConstraint": alternate_region},
)
err = ex.value.response["Error"]
- err["Code"].should.equal("BucketAlreadyOwnedByYou")
- err["Message"].should.equal(
+ assert err["Code"] == "BucketAlreadyOwnedByYou"
+ assert err["Message"] == (
"Your previous request to create the named bucket succeeded and you already own it."
)
- err["BucketName"].should.equal(bucket_name)
+ assert err["BucketName"] == bucket_name
# Try this again - but creating the bucket in a non-default region in the first place
bucket_name = "same-bucket-nondefault-region-test-1371"
@@ -3234,22 +3238,22 @@ def test_create_bucket_duplicate():
CreateBucketConfiguration={"LocationConstraint": alternate_region},
)
err = ex.value.response["Error"]
- err["Code"].should.equal("BucketAlreadyOwnedByYou")
- err["Message"].should.equal(
+ assert err["Code"] == "BucketAlreadyOwnedByYou"
+ assert err["Message"] == (
"Your previous request to create the named bucket succeeded and you already own it."
)
- err["BucketName"].should.equal(bucket_name)
+ assert err["BucketName"] == bucket_name
# Recreating the bucket in the default region should fail
diff_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as ex:
diff_client.create_bucket(Bucket=bucket_name)
err = ex.value.response["Error"]
- err["Code"].should.equal("BucketAlreadyOwnedByYou")
- err["Message"].should.equal(
+ assert err["Code"] == "BucketAlreadyOwnedByYou"
+ assert err["Message"] == (
"Your previous request to create the named bucket succeeded and you already own it."
)
- err["BucketName"].should.equal(bucket_name)
+ assert err["BucketName"] == bucket_name
# Recreating the bucket in a third region should fail
diff_client = boto3.client("s3", region_name="ap-northeast-1")
@@ -3259,11 +3263,11 @@ def test_create_bucket_duplicate():
CreateBucketConfiguration={"LocationConstraint": "ap-northeast-1"},
)
err = ex.value.response["Error"]
- err["Code"].should.equal("BucketAlreadyOwnedByYou")
- err["Message"].should.equal(
+ assert err["Code"] == "BucketAlreadyOwnedByYou"
+ assert err["Message"] == (
"Your previous request to create the named bucket succeeded and you already own it."
)
- err["BucketName"].should.equal(bucket_name)
+ assert err["BucketName"] == bucket_name
@mock_s3
@@ -3274,32 +3278,35 @@ def test_delete_objects_with_empty_keyname():
bucket = resource.create_bucket(Bucket=bucket_name)
key_name = " "
bucket.put_object(Key=key_name, Body=b"")
- client.list_objects(Bucket=bucket_name).should.have.key("Contents").length_of(1)
+ assert len(client.list_objects(Bucket=bucket_name)["Contents"]) == 1
bucket.delete_objects(Delete={"Objects": [{"Key": key_name}]})
- client.list_objects(Bucket=bucket_name).shouldnt.have.key("Contents")
+ assert "Contents" not in client.list_objects(Bucket=bucket_name)
bucket.put_object(Key=key_name, Body=b"")
client.delete_object(Bucket=bucket_name, Key=key_name)
- client.list_objects(Bucket=bucket_name).shouldnt.have.key("Contents")
+ assert "Contents" not in client.list_objects(Bucket=bucket_name)
@mock_s3
def test_head_object_should_return_default_content_type():
- s3 = boto3.resource("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="testbucket")
- s3.Bucket("testbucket").upload_fileobj(BytesIO(b"foobar"), Key="testobject")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
+ s3_resource.create_bucket(Bucket="testbucket")
+ s3_resource.Bucket("testbucket").upload_fileobj(
+ BytesIO(b"foobar"), Key="testobject"
+ )
s3_client = boto3.client("s3", region_name="us-east-1")
resp = s3_client.head_object(Bucket="testbucket", Key="testobject")
- resp["ContentType"].should.equal("binary/octet-stream")
- resp["ResponseMetadata"]["HTTPHeaders"]["content-type"].should.equal(
+ assert resp["ContentType"] == "binary/octet-stream"
+ assert resp["ResponseMetadata"]["HTTPHeaders"]["content-type"] == (
"binary/octet-stream"
)
- s3.Object("testbucket", "testobject").content_type.should.equal(
- "binary/octet-stream"
+ assert (
+ s3_resource.Object("testbucket", "testobject").content_type
+ == "binary/octet-stream"
)
@@ -3311,8 +3318,8 @@ def test_request_partial_content_should_contain_all_metadata():
body = "some text"
query_range = "0-3"
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource.create_bucket(Bucket=bucket)
obj = boto3.resource("s3").Object(bucket, object_key)
obj.put(Body=body)
@@ -3398,8 +3405,8 @@ def test_cross_account_region_access():
with mock.patch.dict(os.environ, {"MOTO_ACCOUNT_ID": account2}):
# Ensure the bucket can be retrieved from another account
response = client2.list_objects(Bucket=bucket_name)
- response.should.have.key("Contents").length_of(1)
- response["Contents"][0]["Key"].should.equal(key)
+ assert len(response["Contents"]) == 1
+ assert response["Contents"][0]["Key"] == key
assert client2.get_object(Bucket=bucket_name, Key=key)
@@ -3408,8 +3415,8 @@ def test_cross_account_region_access():
# Ensure bucket namespace is shared across accounts
with pytest.raises(ClientError) as exc:
client2.create_bucket(Bucket=bucket_name)
- exc.value.response["Error"]["Code"].should.equal("BucketAlreadyExists")
- exc.value.response["Error"]["Message"].should.equal(
+ assert exc.value.response["Error"]["Code"] == "BucketAlreadyExists"
+ assert exc.value.response["Error"]["Message"] == (
"The requested bucket name is not available. The bucket "
"namespace is shared by all users of the system. Please "
"select a different name and try again"
diff --git a/tests/test_s3/test_s3_acl.py b/tests/test_s3/test_s3_acl.py
index 041b3c87d..798b6c5e7 100644
--- a/tests/test_s3/test_s3_acl.py
+++ b/tests/test_s3/test_s3_acl.py
@@ -1,13 +1,13 @@
-import boto3
import os
+from uuid import uuid4
+
+import boto3
import pytest
import requests
-import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from botocore.handlers import disable_signing
from moto import mock_s3
-from uuid import uuid4
DEFAULT_REGION_NAME = "us-east-1"
@@ -22,10 +22,10 @@ DEFAULT_REGION_NAME = "us-east-1"
@pytest.mark.parametrize("readwrite", ["Read", "Write"])
@mock_s3
def test_put_object_acl_using_grant(readwrite, type_key, value, has_quotes):
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = str(uuid4())
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
keyname = "test.txt"
@@ -40,16 +40,16 @@ def test_put_object_acl_using_grant(readwrite, type_key, value, has_quotes):
client.put_object_acl(**args)
grants = client.get_object_acl(Bucket=bucket_name, Key=keyname)["Grants"]
- grants.should.have.length_of(1)
- grants[0].should.have.key("Grantee").equal({"Type": _type, response_key: value})
- grants[0].should.have.key("Permission").equal(readwrite.upper())
+ assert len(grants) == 1
+ assert grants[0]["Grantee"] == {"Type": _type, response_key: value}
+ assert grants[0]["Permission"] == readwrite.upper()
@mock_s3
def test_acl_switching_boto3():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
keyname = "test.txt"
@@ -57,32 +57,30 @@ def test_acl_switching_boto3():
client.put_object_acl(ACL="private", Bucket="foobar", Key=keyname)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.shouldnt.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } not in grants
@mock_s3
def test_acl_switching_nonexistent_key():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- with pytest.raises(ClientError) as e:
- s3.put_object_acl(Bucket="mybucket", Key="nonexistent", ACL="private")
+ with pytest.raises(ClientError) as exc:
+ s3_client.put_object_acl(Bucket="mybucket", Key="nonexistent", ACL="private")
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
@mock_s3
def test_s3_object_in_public_bucket():
- s3 = boto3.resource("s3")
- bucket = s3.Bucket("test-bucket")
+ s3_resource = boto3.resource("s3")
+ bucket = s3_resource.Bucket("test-bucket")
bucket.create(
ACL="public-read", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
@@ -96,19 +94,19 @@ def test_s3_object_in_public_bucket():
.get()["Body"]
.read()
)
- contents.should.equal(b"ABCD")
+ assert contents == b"ABCD"
bucket.put_object(ACL="private", Body=b"ABCD", Key="file.txt")
with pytest.raises(ClientError) as exc:
s3_anonymous.Object(key="file.txt", bucket_name="test-bucket").get()
- exc.value.response["Error"]["Code"].should.equal("403")
+ assert exc.value.response["Error"]["Code"] == "403"
@mock_s3
def test_s3_object_in_public_bucket_using_multiple_presigned_urls():
- s3 = boto3.resource("s3")
- bucket = s3.Bucket("test-bucket")
+ s3_resource = boto3.resource("s3")
+ bucket = s3_resource.Bucket("test-bucket")
bucket.create(
ACL="public-read", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
@@ -125,8 +123,8 @@ def test_s3_object_in_public_bucket_using_multiple_presigned_urls():
@mock_s3
def test_s3_object_in_private_bucket():
- s3 = boto3.resource("s3")
- bucket = s3.Bucket("test-bucket")
+ s3_resource = boto3.resource("s3")
+ bucket = s3_resource.Bucket("test-bucket")
bucket.create(
ACL="private", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
@@ -137,7 +135,7 @@ def test_s3_object_in_private_bucket():
with pytest.raises(ClientError) as exc:
s3_anonymous.Object(key="file.txt", bucket_name="test-bucket").get()
- exc.value.response["Error"]["Code"].should.equal("403")
+ assert exc.value.response["Error"]["Code"] == "403"
bucket.put_object(ACL="public-read", Body=b"ABCD", Key="file.txt")
contents = (
@@ -145,15 +143,15 @@ def test_s3_object_in_private_bucket():
.get()["Body"]
.read()
)
- contents.should.equal(b"ABCD")
+ assert contents == b"ABCD"
@mock_s3
def test_put_bucket_acl_body():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="bucket")
- bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"]
- s3.put_bucket_acl(
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="bucket")
+ bucket_owner = s3_client.get_bucket_acl(Bucket="bucket")["Owner"]
+ s3_client.put_bucket_acl(
Bucket="bucket",
AccessControlPolicy={
"Grants": [
@@ -176,15 +174,17 @@ def test_put_bucket_acl_body():
},
)
- result = s3.get_bucket_acl(Bucket="bucket")
+ result = s3_client.get_bucket_acl(Bucket="bucket")
assert len(result["Grants"]) == 2
- for g in result["Grants"]:
- assert g["Grantee"]["URI"] == "http://acs.amazonaws.com/groups/s3/LogDelivery"
- assert g["Grantee"]["Type"] == "Group"
- assert g["Permission"] in ["WRITE", "READ_ACP"]
+ for grant in result["Grants"]:
+ assert (
+ grant["Grantee"]["URI"] == "http://acs.amazonaws.com/groups/s3/LogDelivery"
+ )
+ assert grant["Grantee"]["Type"] == "Group"
+ assert grant["Permission"] in ["WRITE", "READ_ACP"]
# With one:
- s3.put_bucket_acl(
+ s3_client.put_bucket_acl(
Bucket="bucket",
AccessControlPolicy={
"Grants": [
@@ -199,12 +199,12 @@ def test_put_bucket_acl_body():
"Owner": bucket_owner,
},
)
- result = s3.get_bucket_acl(Bucket="bucket")
+ result = s3_client.get_bucket_acl(Bucket="bucket")
assert len(result["Grants"]) == 1
# With no owner:
with pytest.raises(ClientError) as err:
- s3.put_bucket_acl(
+ s3_client.put_bucket_acl(
Bucket="bucket",
AccessControlPolicy={
"Grants": [
@@ -222,7 +222,7 @@ def test_put_bucket_acl_body():
# With incorrect permission:
with pytest.raises(ClientError) as err:
- s3.put_bucket_acl(
+ s3_client.put_bucket_acl(
Bucket="bucket",
AccessControlPolicy={
"Grants": [
@@ -240,7 +240,7 @@ def test_put_bucket_acl_body():
assert err.value.response["Error"]["Code"] == "MalformedACLError"
# Clear the ACLs:
- result = s3.put_bucket_acl(
+ result = s3_client.put_bucket_acl(
Bucket="bucket", AccessControlPolicy={"Grants": [], "Owner": bucket_owner}
)
assert not result.get("Grants")
@@ -248,31 +248,31 @@ def test_put_bucket_acl_body():
@mock_s3
def test_object_acl_with_presigned_post():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "imageS3Bucket"
object_name = "text.txt"
fields = {"acl": "public-read"}
- file = open("text.txt", "w")
+ file = open("text.txt", "w", encoding="utf-8")
file.write("test")
file.close()
- s3.create_bucket(Bucket=bucket_name)
- response = s3.generate_presigned_post(
+ s3_client.create_bucket(Bucket=bucket_name)
+ response = s3_client.generate_presigned_post(
bucket_name, object_name, Fields=fields, ExpiresIn=60000
)
- with open(object_name, "rb") as f:
- files = {"file": (object_name, f)}
+ with open(object_name, "rb") as fhandle:
+ files = {"file": (object_name, fhandle)}
requests.post(response["url"], data=response["fields"], files=files)
- response = s3.get_object_acl(Bucket=bucket_name, Key=object_name)
+ response = s3_client.get_object_acl(Bucket=bucket_name, Key=object_name)
assert "Grants" in response
assert len(response["Grants"]) == 2
assert response["Grants"][1]["Permission"] == "READ"
- response = s3.get_object(Bucket=bucket_name, Key=object_name)
+ response = s3_client.get_object(Bucket=bucket_name, Key=object_name)
assert "ETag" in response
assert "Body" in response
@@ -281,9 +281,9 @@ def test_object_acl_with_presigned_post():
@mock_s3
def test_acl_setting_boto3():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
content = b"imafile"
@@ -293,22 +293,20 @@ def test_acl_setting_boto3():
)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.should.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } in grants
@mock_s3
def test_acl_setting_via_headers_boto3():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("foobar")
+ bucket = s3_resource.Bucket("foobar")
bucket.create()
keyname = "test.txt"
@@ -317,28 +315,27 @@ def test_acl_setting_via_headers_boto3():
client.put_object_acl(ACL="public-read", Bucket="foobar", Key=keyname)
grants = client.get_object_acl(Bucket="foobar", Key=keyname)["Grants"]
- grants.should.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
- },
- "Permission": "READ",
- }
- )
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AllUsers",
+ },
+ "Permission": "READ",
+ } in grants
@mock_s3
def test_raise_exception_for_grant_and_acl():
client = boto3.client("s3")
- s3 = boto3.resource("s3")
+ s3_resource = boto3.resource("s3")
bucket_name = "bucketname"
client.create_bucket(Bucket=bucket_name)
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
acl = client.get_bucket_acl(Bucket=bucket_name)
acl_grantee_id = acl["Owner"]["ID"]
- # This should raise an exception or provide some error message, but runs without exception instead.
+ # This should raise an exception or provide some error message,
+ # but runs without exception instead.
with pytest.raises(ClientError) as exc:
bucket.put_object(
ACL="bucket-owner-full-control",
@@ -348,7 +345,7 @@ def test_raise_exception_for_grant_and_acl():
GrantFullControl=f'id="{acl_grantee_id}"',
)
err = exc.value.response["Error"]
- err["Code"].should.equal("InvalidRequest")
- err["Message"].should.equal(
- "Specifying both Canned ACLs and Header Grants is not allowed"
+ assert err["Code"] == "InvalidRequest"
+ assert (
+ err["Message"] == "Specifying both Canned ACLs and Header Grants is not allowed"
)
diff --git a/tests/test_s3/test_s3_auth.py b/tests/test_s3/test_s3_auth.py
index 35288baf9..e38d8bf3f 100644
--- a/tests/test_s3/test_s3_auth.py
+++ b/tests/test_s3/test_s3_auth.py
@@ -1,12 +1,12 @@
-import boto3
import json
+from unittest import SkipTest
+
+import boto3
import pytest
-import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_iam, mock_s3, mock_sts, settings
from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID, set_initial_no_auth_action_count
-from unittest import SkipTest
@mock_s3
@@ -15,15 +15,15 @@ def test_load_unexisting_object_without_auth_should_return_403():
if settings.TEST_SERVER_MODE:
raise SkipTest("Auth decorator does not work in server mode")
- """Head an S3 object we should have no access to."""
+ # Head an S3 object we should have no access to.
resource = boto3.resource("s3", region_name="us-east-1")
obj = resource.Object("myfakebucket", "myfakekey")
with pytest.raises(ClientError) as ex:
obj.load()
err = ex.value.response["Error"]
- err["Code"].should.equal("InvalidAccessKeyId")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidAccessKeyId"
+ assert err["Message"] == (
"The AWS Access Key Id you provided does not exist in our records."
)
@@ -38,12 +38,12 @@ def test_head_bucket_with_correct_credentials():
iam_keys = create_user_with_access_key_and_policy()
# This S3-client has correct credentials
- s3 = boto3.client(
+ s3_client = boto3.client(
"s3",
aws_access_key_id=iam_keys["AccessKeyId"],
aws_secret_access_key=iam_keys["SecretAccessKey"],
)
- s3.create_bucket(Bucket="mock_bucket")
+ s3_client.create_bucket(Bucket="mock_bucket")
# Calling head_bucket with the correct credentials works
my_head_bucket(
@@ -52,10 +52,11 @@ def test_head_bucket_with_correct_credentials():
aws_secret_access_key=iam_keys["SecretAccessKey"],
)
- # Verify we can make calls that contain a querystring
- # Specifically, verify that we are able to build/match the AWS signature for a URL with a querystring
- s3.get_bucket_location(Bucket="mock_bucket")
- s3.list_objects_v2(Bucket="mock_bucket")
+ # Verify we can make calls that contain a querystring. Specifically,
+ # verify that we are able to build/match the AWS signature for a URL
+ # with a querystring.
+ s3_client.get_bucket_location(Bucket="mock_bucket")
+ s3_client.list_objects_v2(Bucket="mock_bucket")
@set_initial_no_auth_action_count(4)
@@ -68,12 +69,12 @@ def test_head_bucket_with_incorrect_credentials():
iam_keys = create_user_with_access_key_and_policy()
# Create the bucket with correct credentials
- s3 = boto3.client(
+ s3_client = boto3.client(
"s3",
aws_access_key_id=iam_keys["AccessKeyId"],
aws_secret_access_key=iam_keys["SecretAccessKey"],
)
- s3.create_bucket(Bucket="mock_bucket")
+ s3_client.create_bucket(Bucket="mock_bucket")
# Call head_bucket with incorrect credentials
with pytest.raises(ClientError) as ex:
@@ -83,8 +84,8 @@ def test_head_bucket_with_incorrect_credentials():
aws_secret_access_key="invalid",
)
err = ex.value.response["Error"]
- err["Code"].should.equal("SignatureDoesNotMatch")
- err["Message"].should.equal(
+ assert err["Code"] == "SignatureDoesNotMatch"
+ assert err["Message"] == (
"The request signature we calculated does not match the signature you provided. "
"Check your key and signing method."
)
@@ -200,12 +201,13 @@ def test_delete_objects_without_access_throws_custom_error():
)
bucket = s3_resource.Bucket(bucket_name)
- # This action is not allowed
- # It should return a 200-response, with the body indicating that we do not have access
+ # This action is not allowed.
+ # It should return a 200-response, with the body indicating that we
+ # do not have access.
response = bucket.objects.filter(Prefix="some/prefix").delete()[0]
- response.should.have.key("Errors").length_of(1)
+ assert len(response["Errors"]) == 1
error = response["Errors"][0]
- error.should.have.key("Key").equals("some/prefix/test_file.txt")
- error.should.have.key("Code").equals("AccessDenied")
- error.should.have.key("Message").equals("Access Denied")
+ assert error["Key"] == "some/prefix/test_file.txt"
+ assert error["Code"] == "AccessDenied"
+ assert error["Message"] == "Access Denied"
diff --git a/tests/test_s3/test_s3_bucket_policy.py b/tests/test_s3/test_s3_bucket_policy.py
index 34566786e..1b7bbea62 100644
--- a/tests/test_s3/test_s3_bucket_policy.py
+++ b/tests/test_s3/test_s3_bucket_policy.py
@@ -1,15 +1,15 @@
-import boto3
import json
+
+import boto3
import requests
import pytest
-import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto.moto_server.threaded_moto_server import ThreadedMotoServer
class TestBucketPolicy:
- @staticmethod
+ @classmethod
def setup_class(cls):
cls.server = ThreadedMotoServer(port="6000", verbose=False)
cls.server.start()
@@ -30,7 +30,7 @@ class TestBucketPolicy:
self.client.delete_object(Bucket="mybucket", Key="test_txt")
self.client.delete_bucket(Bucket="mybucket")
- @staticmethod
+ @classmethod
def teardown_class(cls):
cls.server.stop()
@@ -67,7 +67,7 @@ class TestBucketPolicy:
with pytest.raises(ClientError):
self.client.get_object(Bucket="mybucket", Key="test_txt")
- requests.get(self.key_name).status_code.should.equal(status)
+ assert requests.get(self.key_name).status_code == status
def test_block_put_object(self):
# Block Put-access
@@ -80,9 +80,9 @@ class TestBucketPolicy:
with pytest.raises(ClientError) as exc:
self.client.put_object(Bucket="mybucket", Key="test_txt", Body="new data")
err = exc.value.response["Error"]
- err["Message"].should.equal("Forbidden")
+ assert err["Message"] == "Forbidden"
- requests.put(self.key_name).status_code.should.equal(403)
+ assert requests.put(self.key_name).status_code == 403
def test_block_all_actions(self):
# Block all access
@@ -92,16 +92,16 @@ class TestBucketPolicy:
with pytest.raises(ClientError) as exc:
self.client.get_object(Bucket="mybucket", Key="test_txt")
err = exc.value.response["Error"]
- err["Message"].should.equal("Forbidden")
+ assert err["Message"] == "Forbidden"
# But Put (via boto3 or requests) is not allowed
with pytest.raises(ClientError) as exc:
self.client.put_object(Bucket="mybucket", Key="test_txt", Body="new data")
err = exc.value.response["Error"]
- err["Message"].should.equal("Forbidden")
+ assert err["Message"] == "Forbidden"
- requests.get(self.key_name).status_code.should.equal(403)
- requests.put(self.key_name).status_code.should.equal(403)
+ assert requests.get(self.key_name).status_code == 403
+ assert requests.put(self.key_name).status_code == 403
# Allow access again, because we want to delete the object during teardown
self._put_policy(**{"effect": "Allow", "actions": ["s3:*"]})
diff --git a/tests/test_s3/test_s3_classdecorator.py b/tests/test_s3/test_s3_classdecorator.py
index ecb10299d..d5c3c65e7 100644
--- a/tests/test_s3/test_s3_classdecorator.py
+++ b/tests/test_s3/test_s3_classdecorator.py
@@ -1,6 +1,6 @@
-import boto3
-import sure # noqa # pylint: disable=unused-import
import unittest
+
+import boto3
from moto import mock_s3
@@ -9,10 +9,9 @@ class ClassDecoratorTest(unittest.TestCase):
"""
https://github.com/getmoto/moto/issues/3535
An update to the mock-package introduced a failure during teardown.
- This test is in place to catch any similar failures with our mocking approach
+ This test is in place to catch any similar failures with our
+ mocking approach.
"""
def test_instantiation_succeeds(self):
- s3 = boto3.client("s3", region_name="us-east-1")
-
- assert s3 is not None
+ assert boto3.client("s3", region_name="us-east-1") is not None
diff --git a/tests/test_s3/test_s3_cloudformation.py b/tests/test_s3/test_s3_cloudformation.py
index 638cb1c3e..b95710686 100644
--- a/tests/test_s3/test_s3_cloudformation.py
+++ b/tests/test_s3/test_s3_cloudformation.py
@@ -1,7 +1,7 @@
import json
-import boto3
+import re
-import sure # noqa # pylint: disable=unused-import
+import boto3
from moto import mock_s3, mock_cloudformation
@@ -9,8 +9,8 @@ from moto import mock_s3, mock_cloudformation
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_basic():
- s3 = boto3.client("s3", region_name="us-east-1")
- cf = boto3.client("cloudformation", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ cf_client = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
@@ -18,17 +18,17 @@ def test_s3_bucket_cloudformation_basic():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.create_stack(StackName="test_stack", TemplateBody=template_json)
- stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
+ cf_client.create_stack(StackName="test_stack", TemplateBody=template_json)
+ stack_description = cf_client.describe_stacks(StackName="test_stack")["Stacks"][0]
- s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
+ s3_client.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_with_properties():
- s3 = boto3.client("s3", region_name="us-east-1")
- cf = boto3.client("cloudformation", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ cf_client = boto3.client("cloudformation", region_name="us-east-1")
bucket_name = "MyBucket"
template = {
@@ -53,21 +53,24 @@ def test_s3_bucket_cloudformation_with_properties():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.create_stack(StackName="test_stack", TemplateBody=template_json)
- cf.describe_stacks(StackName="test_stack")
- s3.head_bucket(Bucket=bucket_name)
+ cf_client.create_stack(StackName="test_stack", TemplateBody=template_json)
+ cf_client.describe_stacks(StackName="test_stack")
+ s3_client.head_bucket(Bucket=bucket_name)
- encryption = s3.get_bucket_encryption(Bucket=bucket_name)
- encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
- "ApplyServerSideEncryptionByDefault"
- ]["SSEAlgorithm"].should.equal("AES256")
+ encryption = s3_client.get_bucket_encryption(Bucket=bucket_name)
+ assert (
+ encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
+ "ApplyServerSideEncryptionByDefault"
+ ]["SSEAlgorithm"]
+ == "AES256"
+ )
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_no_interruption():
- s3 = boto3.client("s3", region_name="us-east-1")
- cf = boto3.client("cloudformation", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ cf_client = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
@@ -75,9 +78,9 @@ def test_s3_bucket_cloudformation_update_no_interruption():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.create_stack(StackName="test_stack", TemplateBody=template_json)
- stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
- s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
+ cf_client.create_stack(StackName="test_stack", TemplateBody=template_json)
+ stack_description = cf_client.describe_stacks(StackName="test_stack")["Stacks"][0]
+ s3_client.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
template = {
"AWSTemplateFormatVersion": "2010-09-09",
@@ -100,20 +103,23 @@ def test_s3_bucket_cloudformation_update_no_interruption():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.update_stack(StackName="test_stack", TemplateBody=template_json)
- encryption = s3.get_bucket_encryption(
+ cf_client.update_stack(StackName="test_stack", TemplateBody=template_json)
+ encryption = s3_client.get_bucket_encryption(
Bucket=stack_description["Outputs"][0]["OutputValue"]
)
- encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
- "ApplyServerSideEncryptionByDefault"
- ]["SSEAlgorithm"].should.equal("AES256")
+ assert (
+ encryption["ServerSideEncryptionConfiguration"]["Rules"][0][
+ "ApplyServerSideEncryptionByDefault"
+ ]["SSEAlgorithm"]
+ == "AES256"
+ )
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_update_replacement():
- s3 = boto3.client("s3", region_name="us-east-1")
- cf = boto3.client("cloudformation", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ cf_client = boto3.client("cloudformation", region_name="us-east-1")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
@@ -121,9 +127,9 @@ def test_s3_bucket_cloudformation_update_replacement():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.create_stack(StackName="test_stack", TemplateBody=template_json)
- stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
- s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
+ cf_client.create_stack(StackName="test_stack", TemplateBody=template_json)
+ stack_description = cf_client.describe_stacks(StackName="test_stack")["Stacks"][0]
+ s3_client.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
template = {
"AWSTemplateFormatVersion": "2010-09-09",
@@ -136,17 +142,17 @@ def test_s3_bucket_cloudformation_update_replacement():
"Outputs": {"Bucket": {"Value": {"Ref": "testInstance"}}},
}
template_json = json.dumps(template)
- cf.update_stack(StackName="test_stack", TemplateBody=template_json)
- stack_description = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
- s3.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
+ cf_client.update_stack(StackName="test_stack", TemplateBody=template_json)
+ stack_description = cf_client.describe_stacks(StackName="test_stack")["Stacks"][0]
+ s3_client.head_bucket(Bucket=stack_description["Outputs"][0]["OutputValue"])
@mock_s3
@mock_cloudformation
def test_s3_bucket_cloudformation_outputs():
region_name = "us-east-1"
- s3 = boto3.client("s3", region_name=region_name)
- cf = boto3.resource("cloudformation", region_name=region_name)
+ s3_client = boto3.client("s3", region_name=region_name)
+ cf_client = boto3.resource("cloudformation", region_name=region_name)
stack_name = "test-stack"
bucket_name = "test-bucket"
template = {
@@ -188,19 +194,19 @@ def test_s3_bucket_cloudformation_outputs():
},
},
}
- cf.create_stack(StackName=stack_name, TemplateBody=json.dumps(template))
- outputs_list = cf.Stack(stack_name).outputs
+ cf_client.create_stack(StackName=stack_name, TemplateBody=json.dumps(template))
+ outputs_list = cf_client.Stack(stack_name).outputs
output = {item["OutputKey"]: item["OutputValue"] for item in outputs_list}
- s3.head_bucket(Bucket=output["BucketName"])
- output["BucketARN"].should.match(f"arn:aws:s3.+{bucket_name}")
- output["BucketDomainName"].should.equal(f"{bucket_name}.s3.amazonaws.com")
- output["BucketDualStackDomainName"].should.equal(
+ s3_client.head_bucket(Bucket=output["BucketName"])
+ assert re.match(f"arn:aws:s3.+{bucket_name}", output["BucketARN"])
+ assert output["BucketDomainName"] == f"{bucket_name}.s3.amazonaws.com"
+ assert output["BucketDualStackDomainName"] == (
f"{bucket_name}.s3.dualstack.{region_name}.amazonaws.com"
)
- output["BucketRegionalDomainName"].should.equal(
+ assert output["BucketRegionalDomainName"] == (
f"{bucket_name}.s3.{region_name}.amazonaws.com"
)
- output["BucketWebsiteURL"].should.equal(
+ assert output["BucketWebsiteURL"] == (
f"http://{bucket_name}.s3-website.{region_name}.amazonaws.com"
)
- output["BucketName"].should.equal(bucket_name)
+ assert output["BucketName"] == bucket_name
diff --git a/tests/test_s3/test_s3_config.py b/tests/test_s3/test_s3_config.py
index d38a3eb31..b40289848 100644
--- a/tests/test_s3/test_s3_config.py
+++ b/tests/test_s3/test_s3_config.py
@@ -1,6 +1,5 @@
import json
import pytest
-import sure # noqa # pylint: disable=unused-import
from moto import mock_s3
from moto.core.exceptions import InvalidNextTokenException
@@ -53,30 +52,30 @@ def test_list_config_discovered_resources():
) == ([], None)
# With 10 buckets in us-west-2:
- for x in range(0, 10):
- s3_config_query_backend.create_bucket(f"bucket{x}", "us-west-2")
+ for idx in range(0, 10):
+ s3_config_query_backend.create_bucket(f"bucket{idx}", "us-west-2")
# With 2 buckets in eu-west-1:
- for x in range(10, 12):
- s3_config_query_backend.create_bucket(f"eu-bucket{x}", "eu-west-1")
+ for idx in range(10, 12):
+ s3_config_query_backend.create_bucket(f"eu-bucket{idx}", "eu-west-1")
result, next_token = s3_config_query.list_config_service_resources(
DEFAULT_ACCOUNT_ID, None, None, 100, None
)
assert not next_token
assert len(result) == 12
- for x in range(0, 10):
- assert result[x] == {
+ for idx in range(0, 10):
+ assert result[idx] == {
"type": "AWS::S3::Bucket",
- "id": f"bucket{x}",
- "name": f"bucket{x}",
+ "id": f"bucket{idx}",
+ "name": f"bucket{idx}",
"region": "us-west-2",
}
- for x in range(10, 12):
- assert result[x] == {
+ for idx in range(10, 12):
+ assert result[idx] == {
"type": "AWS::S3::Bucket",
- "id": f"eu-bucket{x}",
- "name": f"eu-bucket{x}",
+ "id": f"eu-bucket{idx}",
+ "name": f"eu-bucket{idx}",
"region": "eu-west-1",
}
diff --git a/tests/test_s3/test_s3_copyobject.py b/tests/test_s3/test_s3_copyobject.py
index 701e8df58..747c6d0ba 100644
--- a/tests/test_s3/test_s3_copyobject.py
+++ b/tests/test_s3/test_s3_copyobject.py
@@ -3,13 +3,10 @@ import datetime
import boto3
from botocore.client import ClientError
+from moto import mock_s3, mock_kms
from moto.s3.responses import DEFAULT_REGION_NAME
import pytest
-import sure # noqa # pylint: disable=unused-import
-
-from moto import mock_s3, mock_kms
-
@pytest.mark.parametrize(
"key_name",
@@ -22,38 +19,38 @@ from moto import mock_s3, mock_kms
)
@mock_s3
def test_copy_key_boto3(key_name):
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", key_name)
+ key = s3_resource.Object("foobar", key_name)
key.put(Body=b"some value")
- key2 = s3.Object("foobar", "new-key")
+ key2 = s3_resource.Object("foobar", "new-key")
key2.copy_from(CopySource=f"foobar/{key_name}")
resp = client.get_object(Bucket="foobar", Key=key_name)
- resp["Body"].read().should.equal(b"some value")
+ assert resp["Body"].read() == b"some value"
resp = client.get_object(Bucket="foobar", Key="new-key")
- resp["Body"].read().should.equal(b"some value")
+ assert resp["Body"].read() == b"some value"
@mock_s3
def test_copy_key_boto3_with_sha256_checksum():
# Setup
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
key_name = "key"
new_key = "new_key"
bucket = "foobar"
expected_hash = "qz0H8xacy9DtbEtF3iFRn5+TjHLSQSSZiquUnOg7tRs="
- s3.create_bucket(Bucket=bucket)
- key = s3.Object("foobar", key_name)
+ s3_resource.create_bucket(Bucket=bucket)
+ key = s3_resource.Object("foobar", key_name)
key.put(Body=b"some value")
# Execute
- key2 = s3.Object(bucket, new_key)
+ key2 = s3_resource.Object(bucket, new_key)
key2.copy(
CopySource={"Bucket": bucket, "Key": key_name},
ExtraArgs={"ChecksumAlgorithm": "SHA256"},
@@ -83,14 +80,14 @@ def test_copy_key_boto3_with_sha256_checksum():
@mock_s3
def test_copy_key_with_version_boto3():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
client.put_bucket_versioning(
Bucket="foobar", VersioningConfiguration={"Status": "Enabled"}
)
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
key.put(Body=b"some value")
key.put(Body=b"another value")
@@ -99,26 +96,26 @@ def test_copy_key_with_version_boto3():
]
old_version = [v for v in all_versions if not v["IsLatest"]][0]
- key2 = s3.Object("foobar", "new-key")
+ key2 = s3_resource.Object("foobar", "new-key")
key2.copy_from(CopySource=f"foobar/the-key?versionId={old_version['VersionId']}")
resp = client.get_object(Bucket="foobar", Key="the-key")
- resp["Body"].read().should.equal(b"another value")
+ assert resp["Body"].read() == b"another value"
resp = client.get_object(Bucket="foobar", Key="new-key")
- resp["Body"].read().should.equal(b"some value")
+ assert resp["Body"].read() == b"some value"
@mock_s3
def test_copy_object_with_bucketkeyenabled_returns_the_value():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "test-copy-object-with-bucketkeyenabled"
- s3.create_bucket(Bucket=bucket_name)
+ s3_resource.create_bucket(Bucket=bucket_name)
- key = s3.Object(bucket_name, "the-key")
+ key = s3_resource.Object(bucket_name, "the-key")
key.put(Body=b"some value")
- key2 = s3.Object(bucket_name, "new-key")
+ key2 = s3_resource.Object(bucket_name, "new-key")
key2.copy_from(
CopySource=f"{bucket_name}/the-key",
BucketKeyEnabled=True,
@@ -127,29 +124,30 @@ def test_copy_object_with_bucketkeyenabled_returns_the_value():
resp = client.get_object(Bucket=bucket_name, Key="the-key")
src_headers = resp["ResponseMetadata"]["HTTPHeaders"]
- src_headers.shouldnt.have.key("x-amz-server-side-encryption")
- src_headers.shouldnt.have.key("x-amz-server-side-encryption-aws-kms-key-id")
- src_headers.shouldnt.have.key("x-amz-server-side-encryption-bucket-key-enabled")
+ assert "x-amz-server-side-encryption" not in src_headers
+ assert "x-amz-server-side-encryption-aws-kms-key-id" not in src_headers
+ assert "x-amz-server-side-encryption-bucket-key-enabled" not in src_headers
resp = client.get_object(Bucket=bucket_name, Key="new-key")
target_headers = resp["ResponseMetadata"]["HTTPHeaders"]
- target_headers.should.have.key("x-amz-server-side-encryption")
+ assert "x-amz-server-side-encryption" in target_headers
# AWS will also return the KMS default key id - not yet implemented
- # target_headers.should.have.key("x-amz-server-side-encryption-aws-kms-key-id")
+ # assert "x-amz-server-side-encryption-aws-kms-key-id" in target_headers
# This field is only returned if encryption is set to 'aws:kms'
- target_headers.should.have.key("x-amz-server-side-encryption-bucket-key-enabled")
- str(
- target_headers["x-amz-server-side-encryption-bucket-key-enabled"]
- ).lower().should.equal("true")
+ assert "x-amz-server-side-encryption-bucket-key-enabled" in target_headers
+ assert (
+ str(target_headers["x-amz-server-side-encryption-bucket-key-enabled"]).lower()
+ == "true"
+ )
@mock_s3
def test_copy_key_with_metadata():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
metadata = {"md": "Metadatastring"}
content_type = "application/json"
initial = key.put(Body=b"{}", Metadata=metadata, ContentType=content_type)
@@ -157,18 +155,18 @@ def test_copy_key_with_metadata():
client.copy_object(Bucket="foobar", CopySource="foobar/the-key", Key="new-key")
resp = client.get_object(Bucket="foobar", Key="new-key")
- resp["Metadata"].should.equal(metadata)
- resp["ContentType"].should.equal(content_type)
- resp["ETag"].should.equal(initial["ETag"])
+ assert resp["Metadata"] == metadata
+ assert resp["ContentType"] == content_type
+ assert resp["ETag"] == initial["ETag"]
@mock_s3
def test_copy_key_replace_metadata():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- key = s3.Object("foobar", "the-key")
+ key = s3_resource.Object("foobar", "the-key")
initial = key.put(Body=b"some value", Metadata={"md": "Metadatastring"})
client.copy_object(
@@ -180,29 +178,31 @@ def test_copy_key_replace_metadata():
)
resp = client.get_object(Bucket="foobar", Key="new-key")
- resp["Metadata"].should.equal({"momd": "Mometadatastring"})
- resp["ETag"].should.equal(initial["ETag"])
+ assert resp["Metadata"] == {"momd": "Mometadatastring"}
+ assert resp["ETag"] == initial["ETag"]
@mock_s3
def test_copy_key_without_changes_should_error():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "my_bucket"
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
key_name = "my_key"
- key = s3.Object(bucket_name, key_name)
+ key = s3_resource.Object(bucket_name, key_name)
- s3.create_bucket(Bucket=bucket_name)
+ s3_resource.create_bucket(Bucket=bucket_name)
key.put(Body=b"some value")
- with pytest.raises(ClientError) as e:
+ with pytest.raises(ClientError) as exc:
client.copy_object(
Bucket=bucket_name,
CopySource=f"{bucket_name}/{key_name}",
Key=key_name,
)
- e.value.response["Error"]["Message"].should.equal(
- "This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes."
+ assert exc.value.response["Error"]["Message"] == (
+ "This copy request is illegal because it is trying to copy an "
+ "object to itself without changing the object's metadata, storage "
+ "class, website redirect location or encryption attributes."
)
@@ -210,11 +210,11 @@ def test_copy_key_without_changes_should_error():
def test_copy_key_without_changes_should_not_error():
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "my_bucket"
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
key_name = "my_key"
- key = s3.Object(bucket_name, key_name)
+ key = s3_resource.Object(bucket_name, key_name)
- s3.create_bucket(Bucket=bucket_name)
+ s3_resource.create_bucket(Bucket=bucket_name)
key.put(Body=b"some value")
client.copy_object(
@@ -232,9 +232,9 @@ def test_copy_key_without_changes_should_not_error():
@mock_s3
def test_copy_key_reduced_redundancy():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("test_bucket")
+ bucket = s3_resource.Bucket("test_bucket")
bucket.create()
bucket.put_object(Key="the-key", Body=b"somedata")
@@ -246,18 +246,18 @@ def test_copy_key_reduced_redundancy():
StorageClass="REDUCED_REDUNDANCY",
)
- keys = dict([(k.key, k) for k in bucket.objects.all()])
- keys["new-key"].storage_class.should.equal("REDUCED_REDUNDANCY")
- keys["the-key"].storage_class.should.equal("STANDARD")
+ keys = {k.key: k for k in bucket.objects.all()}
+ assert keys["new-key"].storage_class == "REDUCED_REDUNDANCY"
+ assert keys["the-key"].storage_class == "STANDARD"
@mock_s3
def test_copy_non_existing_file():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
src = "srcbucket"
target = "target"
- s3.create_bucket(Bucket=src)
- s3.create_bucket(Bucket=target)
+ s3_resource.create_bucket(Bucket=src)
+ s3_resource.create_bucket(Bucket=target)
s3_client = boto3.client("s3")
with pytest.raises(ClientError) as exc:
@@ -265,9 +265,9 @@ def test_copy_non_existing_file():
Bucket=target, CopySource={"Bucket": src, "Key": "foofoofoo"}, Key="newkey"
)
err = exc.value.response["Error"]
- err["Code"].should.equal("NoSuchKey")
- err["Message"].should.equal("The specified key does not exist.")
- err["Key"].should.equal("foofoofoo")
+ assert err["Code"] == "NoSuchKey"
+ assert err["Message"] == "The specified key does not exist."
+ assert err["Key"] == "foofoofoo"
@mock_s3
@@ -284,7 +284,7 @@ def test_copy_object_with_versioning():
client.put_object(Bucket="blah", Key="test1", Body=b"test1")
client.put_object(Bucket="blah", Key="test2", Body=b"test2")
- client.get_object(Bucket="blah", Key="test1")["VersionId"]
+ _ = client.get_object(Bucket="blah", Key="test1")["VersionId"]
obj2_version = client.get_object(Bucket="blah", Key="test2")["VersionId"]
client.copy_object(
@@ -293,7 +293,7 @@ def test_copy_object_with_versioning():
obj2_version_new = client.get_object(Bucket="blah", Key="test2")["VersionId"]
# Version should be different to previous version
- obj2_version_new.should_not.equal(obj2_version)
+ assert obj2_version_new != obj2_version
client.copy_object(
CopySource={"Bucket": "blah", "Key": "test2", "VersionId": obj2_version},
@@ -301,16 +301,16 @@ def test_copy_object_with_versioning():
Key="test3",
)
obj3_version_new = client.get_object(Bucket="blah", Key="test3")["VersionId"]
- obj3_version_new.should_not.equal(obj2_version_new)
+ assert obj3_version_new != obj2_version_new
# Copy file that doesn't exist
- with pytest.raises(ClientError) as e:
+ with pytest.raises(ClientError) as exc:
client.copy_object(
CopySource={"Bucket": "blah", "Key": "test4", "VersionId": obj2_version},
Bucket="blah",
Key="test5",
)
- e.value.response["Error"]["Code"].should.equal("NoSuchKey")
+ assert exc.value.response["Error"]["Code"] == "NoSuchKey"
response = client.create_multipart_upload(Bucket="blah", Key="test4")
upload_id = response["UploadId"]
@@ -331,7 +331,7 @@ def test_copy_object_with_versioning():
response = client.get_object(Bucket="blah", Key="test4")
data = response["Body"].read()
- data.should.equal(b"test2")
+ assert data == b"test2"
@mock_s3
@@ -355,7 +355,7 @@ def test_copy_object_from_unversioned_to_versioned_bucket():
).get("VersionId")
# VersionId should be present in the response
- obj2_version_new.should_not.equal(None)
+ assert obj2_version_new is not None
@mock_s3
@@ -376,8 +376,8 @@ def test_copy_object_with_replacement_tagging():
Tagging="aws:tag=invalid_key",
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidTag")
+ exc = err.value
+ assert exc.response["Error"]["Code"] == "InvalidTag"
client.copy_object(
CopySource={"Bucket": "mybucket", "Key": "original"},
@@ -394,9 +394,9 @@ def test_copy_object_with_replacement_tagging():
)
tags1 = client.get_object_tagging(Bucket="mybucket", Key="copy1")["TagSet"]
- tags1.should.equal([{"Key": "tag", "Value": "new"}])
+ assert tags1 == [{"Key": "tag", "Value": "new"}]
tags2 = client.get_object_tagging(Bucket="mybucket", Key="copy2")["TagSet"]
- tags2.should.equal([{"Key": "tag", "Value": "old"}])
+ assert tags2 == [{"Key": "tag", "Value": "old"}]
@mock_s3
@@ -428,10 +428,10 @@ def test_copy_object_with_kms_encryption():
@mock_kms
def test_copy_object_in_place_with_encryption():
kms_client = boto3.client("kms", region_name=DEFAULT_REGION_NAME)
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
kms_key = kms_client.create_key()["KeyMetadata"]["KeyId"]
- bucket = s3.Bucket("test_bucket")
+ bucket = s3_resource.Bucket("test_bucket")
bucket.create()
key = "source-key"
resp = client.put_object(
@@ -464,8 +464,9 @@ def test_copy_object_in_place_with_encryption():
)
assert "BucketKeyEnabled" not in resp
- # this is an edge case, if the source object SSE was not AES256, AWS allows you to not specify any fields
- # as it will use AES256 by default and is different from the source key
+ # This is an edge case: if the source object SSE was not AES256,
+ # AWS allows you to not specify any fields as it will use AES256 by
+ # default and is different from the source key.
resp = client.copy_object(
Bucket="test_bucket",
CopySource=f"test_bucket/{key}",
@@ -473,7 +474,8 @@ def test_copy_object_in_place_with_encryption():
)
assert resp["ServerSideEncryption"] == "AES256"
- # check that it allows copying in the place with the same ServerSideEncryption setting as the source
+ # Check that it allows copying in place with the same
+ # ServerSideEncryption setting as the source.
resp = client.copy_object(
Bucket="test_bucket",
CopySource=f"test_bucket/{key}",
@@ -485,11 +487,14 @@ def test_copy_object_in_place_with_encryption():
@mock_s3
def test_copy_object_in_place_with_storage_class():
- # this test will validate that setting StorageClass (even the same as source) allows a copy in place
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ """Validate setting StorageClass allows a copy in place.
+
+ This should be true even if destination object is the same as source.
+ """
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "test-bucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
key = "source-key"
bucket.put_object(Key=key, Body=b"somedata", StorageClass="STANDARD")
@@ -508,9 +513,9 @@ def test_copy_object_in_place_with_storage_class():
@mock_s3
def test_copy_object_does_not_copy_storage_class():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- bucket = s3.Bucket("test_bucket")
+ bucket = s3_resource.Bucket("test_bucket")
bucket.create()
source_key = "source-key"
dest_key = "dest-key"
@@ -522,17 +527,17 @@ def test_copy_object_does_not_copy_storage_class():
)
# Verify that the destination key does not have STANDARD_IA as StorageClass
- keys = dict([(k.key, k) for k in bucket.objects.all()])
- keys[source_key].storage_class.should.equal("STANDARD_IA")
- keys[dest_key].storage_class.should.equal("STANDARD")
+ keys = {k.key: k for k in bucket.objects.all()}
+ assert keys[source_key].storage_class == "STANDARD_IA"
+ assert keys[dest_key].storage_class == "STANDARD"
@mock_s3
def test_copy_object_does_not_copy_acl():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "testbucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
source_key = "source-key"
dest_key = "dest-key"
@@ -560,24 +565,27 @@ def test_copy_object_does_not_copy_acl():
@mock_s3
def test_copy_object_in_place_with_metadata():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "testbucket"
- bucket = s3.Bucket(bucket_name)
+ bucket = s3_resource.Bucket(bucket_name)
bucket.create()
key_name = "source-key"
bucket.put_object(Key=key_name, Body=b"somedata")
- # test that giving metadata is not enough, and should provide MetadataDirective=REPLACE on top
- with pytest.raises(ClientError) as e:
+ # Test that giving metadata alone is not enough; the caller must
+ # also provide MetadataDirective=REPLACE on top.
+ with pytest.raises(ClientError) as exc:
client.copy_object(
Bucket=bucket_name,
CopySource=f"{bucket_name}/{key_name}",
Key=key_name,
Metadata={"key": "value"},
)
- e.value.response["Error"]["Message"].should.equal(
- "This copy request is illegal because it is trying to copy an object to itself without changing the object's metadata, storage class, website redirect location or encryption attributes."
+ assert exc.value.response["Error"]["Message"] == (
+ "This copy request is illegal because it is trying to copy an "
+ "object to itself without changing the object's metadata, "
+ "storage class, website redirect location or encryption attributes."
)
# you can only provide MetadataDirective=REPLACE and it will copy without any metadata
@@ -672,7 +680,8 @@ def test_copy_object_in_place_website_redirect_location():
bucket_name = "testbucket"
key = "source-key"
client.create_bucket(Bucket=bucket_name)
- # this test will validate that setting WebsiteRedirectLocation (even the same as source) allows a copy in place
+ # This test will validate that setting WebsiteRedirectLocation
+ # (even the same as source) allows a copy in place.
client.put_object(
Bucket=bucket_name,
@@ -738,11 +747,18 @@ def test_copy_object_in_place_with_bucket_encryption():
["CRC32", "SHA1", "SHA256"],
)
def test_copy_key_boto3_with_both_sha256_checksum(algorithm):
- # This test will validate that moto S3 checksum calculations are correct
- # We first create an object with a Checksum calculated by boto, by specifying ChecksumAlgorithm="SHA256"
- # we then retrieve the right checksum from this request
- # we copy the object while requesting moto to recalculate the checksum for that key
- # we verify that both checksums are equal
+ """Validate that moto S3 checksum calculations are correct.
+
+ We first create an object with a Checksum calculated by boto, by
+ specifying ChecksumAlgorithm="SHA256".
+
+ We then retrieve the right checksum from this request.
+
+ We copy the object while requesting moto to recalculate the checksum
+ for that key.
+
+ We verify that both checksums are equal.
+ """
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
source_key = "source-key"
dest_key = "dest-key"
diff --git a/tests/test_s3/test_s3_custom_endpoint.py b/tests/test_s3/test_s3_custom_endpoint.py
index 11c445d14..78a961257 100644
--- a/tests/test_s3/test_s3_custom_endpoint.py
+++ b/tests/test_s3/test_s3_custom_endpoint.py
@@ -1,11 +1,11 @@
-import boto3
-import sure # noqa # pylint: disable=unused-import
import os
+from unittest import SkipTest
+from unittest.mock import patch
+
+import boto3
import pytest
from moto import mock_s3, settings
-from unittest import SkipTest
-from unittest.mock import patch
DEFAULT_REGION_NAME = "us-east-1"
@@ -25,9 +25,9 @@ def test_create_and_list_buckets(url):
conn = boto3.resource("s3", endpoint_url=url)
conn.create_bucket(Bucket=bucket)
- s3 = boto3.client("s3", endpoint_url=url)
- all_buckets = s3.list_buckets()["Buckets"]
- [b["Name"] for b in all_buckets].should.contain(bucket)
+ s3_client = boto3.client("s3", endpoint_url=url)
+ all_buckets = s3_client.list_buckets()["Buckets"]
+ assert bucket in [b["Name"] for b in all_buckets]
@pytest.mark.parametrize("url", [CUSTOM_ENDPOINT, CUSTOM_ENDPOINT_2])
@@ -45,9 +45,9 @@ def test_create_and_list_buckets_with_multiple_supported_endpoints(url):
conn = boto3.resource("s3", endpoint_url=url)
conn.create_bucket(Bucket=bucket)
- s3 = boto3.client("s3", endpoint_url=url)
- all_buckets = s3.list_buckets()["Buckets"]
- [b["Name"] for b in all_buckets].should.contain(bucket)
+ s3_client = boto3.client("s3", endpoint_url=url)
+ all_buckets = s3_client.list_buckets()["Buckets"]
+ assert bucket in [b["Name"] for b in all_buckets]
@pytest.mark.parametrize("url", [CUSTOM_ENDPOINT, CUSTOM_ENDPOINT_2])
@@ -63,12 +63,12 @@ def test_put_and_get_object(url):
conn = boto3.resource("s3", endpoint_url=url)
conn.create_bucket(Bucket=bucket)
- s3 = boto3.client("s3", endpoint_url=url)
- s3.put_object(Bucket=bucket, Key=key, Body=contents)
+ s3_client = boto3.client("s3", endpoint_url=url)
+ s3_client.put_object(Bucket=bucket, Key=key, Body=contents)
body = conn.Object(bucket, key).get()["Body"].read().decode()
- body.should.equal(contents)
+ assert body == contents
@pytest.mark.parametrize("url", [CUSTOM_ENDPOINT, CUSTOM_ENDPOINT_2])
@@ -80,12 +80,12 @@ def test_put_and_list_objects(url):
with mock_s3():
bucket = "mybucket"
- s3 = boto3.client("s3", endpoint_url=url)
- s3.create_bucket(Bucket=bucket)
- s3.put_object(Bucket=bucket, Key="one", Body=b"1")
- s3.put_object(Bucket=bucket, Key="two", Body=b"22")
- s3.put_object(Bucket=bucket, Key="three", Body=b"333")
+ s3_client = boto3.client("s3", endpoint_url=url)
+ s3_client.create_bucket(Bucket=bucket)
+ s3_client.put_object(Bucket=bucket, Key="one", Body=b"1")
+ s3_client.put_object(Bucket=bucket, Key="two", Body=b"22")
+ s3_client.put_object(Bucket=bucket, Key="three", Body=b"333")
- contents = s3.list_objects(Bucket=bucket)["Contents"]
- contents.should.have.length_of(3)
- [c["Key"] for c in contents].should.contain("two")
+ contents = s3_client.list_objects(Bucket=bucket)["Contents"]
+ assert len(contents) == 3
+ assert "two" in [c["Key"] for c in contents]
diff --git a/tests/test_s3/test_s3_encryption.py b/tests/test_s3/test_s3_encryption.py
index 3c566435f..e8f8683cf 100644
--- a/tests/test_s3/test_s3_encryption.py
+++ b/tests/test_s3/test_s3_encryption.py
@@ -1,9 +1,10 @@
+from uuid import uuid4
+
import boto3
+from botocore.exceptions import ClientError
import pytest
-from botocore.exceptions import ClientError
from moto import mock_s3
-from uuid import uuid4
@mock_s3
@@ -14,11 +15,9 @@ def test_encryption_on_new_bucket_fails():
with pytest.raises(ClientError) as exc:
conn.get_bucket_encryption(Bucket="mybucket")
err = exc.value.response["Error"]
- err["Code"].should.equal("ServerSideEncryptionConfigurationNotFoundError")
- err["Message"].should.equal(
- "The server side encryption configuration was not found"
- )
- err["BucketName"].should.equal("mybucket")
+ assert err["Code"] == "ServerSideEncryptionConfigurationNotFoundError"
+ assert err["Message"] == "The server side encryption configuration was not found"
+ assert err["BucketName"] == "mybucket"
@mock_s3
@@ -46,7 +45,7 @@ def test_put_and_get_encryption():
assert "ServerSideEncryptionConfiguration" in resp
return_config = sse_config.copy()
return_config["Rules"][0]["BucketKeyEnabled"] = False
- assert resp["ServerSideEncryptionConfiguration"].should.equal(return_config)
+ assert resp["ServerSideEncryptionConfiguration"] == return_config
@mock_s3
@@ -75,65 +74,65 @@ def test_delete_and_get_encryption():
with pytest.raises(ClientError) as exc:
conn.get_bucket_encryption(Bucket="mybucket")
err = exc.value.response["Error"]
- err["Code"].should.equal("ServerSideEncryptionConfigurationNotFoundError")
+ assert err["Code"] == "ServerSideEncryptionConfigurationNotFoundError"
@mock_s3
def test_encryption_status_on_new_objects():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket=bucket_name)
- s3.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
# verify encryption status on object itself
- res = s3.get_object(Bucket=bucket_name, Key="file.txt")
- res.shouldnt.have.key("ServerSideEncryption")
+ res = s3_client.get_object(Bucket=bucket_name, Key="file.txt")
+ assert "ServerSideEncryption" not in res
# enable encryption
sse_config = {
"Rules": [{"ApplyServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}}]
}
- s3.put_bucket_encryption(
+ s3_client.put_bucket_encryption(
Bucket=bucket_name, ServerSideEncryptionConfiguration=sse_config
)
# verify encryption status on existing object hasn't changed
- res = s3.get_object(Bucket=bucket_name, Key="file.txt")
- res.shouldnt.have.key("ServerSideEncryption")
+ res = s3_client.get_object(Bucket=bucket_name, Key="file.txt")
+ assert "ServerSideEncryption" not in res
# create object2
- s3.put_object(Bucket=bucket_name, Body=b"test", Key="file2.txt")
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file2.txt")
# verify encryption status on object2
- res = s3.get_object(Bucket=bucket_name, Key="file2.txt")
- res.should.have.key("ServerSideEncryption").equals("AES256")
+ res = s3_client.get_object(Bucket=bucket_name, Key="file2.txt")
+ assert res["ServerSideEncryption"] == "AES256"
@mock_s3
def test_encryption_status_on_copied_objects():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket=bucket_name)
- s3.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
# enable encryption
sse_config = {
"Rules": [{"ApplyServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}}]
}
- s3.put_bucket_encryption(
+ s3_client.put_bucket_encryption(
Bucket=bucket_name, ServerSideEncryptionConfiguration=sse_config
)
# copy object
- s3.copy_object(
+ s3_client.copy_object(
CopySource=f"{bucket_name}/file.txt", Bucket=bucket_name, Key="file2.txt"
)
# verify encryption status on object1 hasn't changed
- res = s3.get_object(Bucket=bucket_name, Key="file.txt")
- res.shouldnt.have.key("ServerSideEncryption")
+ res = s3_client.get_object(Bucket=bucket_name, Key="file.txt")
+ assert "ServerSideEncryption" not in res
# verify encryption status on object2 does have encryption
- res = s3.get_object(Bucket=bucket_name, Key="file2.txt")
- res.should.have.key("ServerSideEncryption").equals("AES256")
+ res = s3_client.get_object(Bucket=bucket_name, Key="file2.txt")
+ assert res["ServerSideEncryption"] == "AES256"
@mock_s3
def test_encryption_bucket_key_for_aes_not_returned():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket=bucket_name)
# enable encryption
sse_config = {
"Rules": [
@@ -143,8 +142,8 @@ def test_encryption_bucket_key_for_aes_not_returned():
}
]
}
- s3.put_bucket_encryption(
+ s3_client.put_bucket_encryption(
Bucket=bucket_name, ServerSideEncryptionConfiguration=sse_config
)
- res = s3.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
- res.shouldnt.have.key("BucketKeyEnabled")
+ res = s3_client.put_object(Bucket=bucket_name, Body=b"test", Key="file.txt")
+ assert "BucketKeyEnabled" not in res
diff --git a/tests/test_s3/test_s3_file_handles.py b/tests/test_s3/test_s3_file_handles.py
index bd601b82b..af5ab4e11 100644
--- a/tests/test_s3/test_s3_file_handles.py
+++ b/tests/test_s3/test_s3_file_handles.py
@@ -1,13 +1,15 @@
-import boto3
import copy
import gc
import warnings
from functools import wraps
+from unittest import SkipTest, TestCase
+
+import boto3
+
from moto import settings, mock_s3
from moto.dynamodb.models import DynamoDBBackend
from moto.s3 import models as s3model, s3_backends
from moto.s3.responses import S3ResponseInstance
-from unittest import SkipTest, TestCase
from tests import DEFAULT_ACCOUNT_ID
@@ -17,49 +19,51 @@ TEST_BUCKET_VERSIONED = "versioned-bucket"
TEST_KEY = "my-key"
-def verify_zero_warnings(f):
- @wraps(f)
+def verify_zero_warnings(func):
+ @wraps(func)
def wrapped(*args, **kwargs):
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always", ResourceWarning) # add filter
- resp = f(*args, **kwargs)
- # Get the TestClass reference, and reset any S3Backends that we've created as part of that class
+ resp = func(*args, **kwargs)
+ # Get the TestClass reference, and reset any S3Backends that
+ # we've created as part of that class.
(class_ref,) = args
for obj in class_ref.__dict__:
if isinstance(obj, s3model.S3Backend):
obj.reset()
- # Now collect garbage, which will throw any warnings if there are unclosed FileHandles
+ # Now collect garbage, which will throw any warnings if there
+ # are unclosed FileHandles.
gc.collect()
warning_types = [type(warn.message) for warn in warning_list]
- warning_types.shouldnt.contain(ResourceWarning)
+ assert ResourceWarning not in warning_types
return resp
return wrapped
class TestS3FileHandleClosures(TestCase):
- """
- Large Uploads are written to disk for performance reasons
- These tests verifies that the filehandles are properly closed after specific actions
+ """Verifies filehandles are properly closed after specific actions.
+
+ Large Uploads are written to disk for performance reasons.
"""
def setUp(self) -> None:
if settings.TEST_SERVER_MODE:
raise SkipTest("No point in testing ServerMode, we're not using boto3")
- self.s3 = s3_backends[DEFAULT_ACCOUNT_ID]["global"]
- self.s3.create_bucket(TEST_BUCKET, "us-west-1")
- self.s3.create_bucket(TEST_BUCKET_VERSIONED, "us-west-1")
- self.s3.put_object(TEST_BUCKET, TEST_KEY, "x" * 10_000_000)
+ self.s3_client = s3_backends[DEFAULT_ACCOUNT_ID]["global"]
+ self.s3_client.create_bucket(TEST_BUCKET, "us-west-1")
+ self.s3_client.create_bucket(TEST_BUCKET_VERSIONED, "us-west-1")
+ self.s3_client.put_object(TEST_BUCKET, TEST_KEY, "x" * 10_000_000)
def tearDown(self) -> None:
for bucket_name in (
TEST_BUCKET,
TEST_BUCKET_VERSIONED,
):
- keys = list(self.s3.get_bucket(bucket_name).keys.keys())
+ keys = list(self.s3_client.get_bucket(bucket_name).keys.keys())
for key in keys:
- self.s3.delete_object(bucket_name, key)
- self.s3.delete_bucket(bucket_name)
+ self.s3_client.delete_object(bucket_name, key)
+ self.s3_client.delete_bucket(bucket_name)
@verify_zero_warnings
def test_upload_large_file(self):
@@ -69,27 +73,27 @@ class TestS3FileHandleClosures(TestCase):
@verify_zero_warnings
def test_delete_large_file(self):
- self.s3.delete_object(bucket_name=TEST_BUCKET, key_name=TEST_KEY)
+ self.s3_client.delete_object(bucket_name=TEST_BUCKET, key_name=TEST_KEY)
@verify_zero_warnings
def test_overwriting_file(self):
- self.s3.put_object(TEST_BUCKET, TEST_KEY, "b" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET, TEST_KEY, "b" * 10_000_000)
@verify_zero_warnings
def test_versioned_file(self):
- self.s3.put_bucket_versioning(TEST_BUCKET, "Enabled")
- self.s3.put_object(TEST_BUCKET, TEST_KEY, "b" * 10_000_000)
+ self.s3_client.put_bucket_versioning(TEST_BUCKET, "Enabled")
+ self.s3_client.put_object(TEST_BUCKET, TEST_KEY, "b" * 10_000_000)
@verify_zero_warnings
def test_copy_object(self):
- key = self.s3.get_object(TEST_BUCKET, TEST_KEY)
- self.s3.copy_object(
+ key = self.s3_client.get_object(TEST_BUCKET, TEST_KEY)
+ self.s3_client.copy_object(
src_key=key, dest_bucket_name=TEST_BUCKET, dest_key_name="key-2"
)
@verify_zero_warnings
def test_part_upload(self):
- multipart_id = self.s3.create_multipart_upload(
+ multipart_id = self.s3_client.create_multipart_upload(
bucket_name=TEST_BUCKET,
key_name="mp-key",
metadata={},
@@ -99,7 +103,7 @@ class TestS3FileHandleClosures(TestCase):
sse_encryption=None,
kms_key_id=None,
)
- self.s3.upload_part(
+ self.s3_client.upload_part(
bucket_name=TEST_BUCKET,
multipart_id=multipart_id,
part_id=1,
@@ -108,7 +112,7 @@ class TestS3FileHandleClosures(TestCase):
@verify_zero_warnings
def test_overwriting_part_upload(self):
- multipart_id = self.s3.create_multipart_upload(
+ multipart_id = self.s3_client.create_multipart_upload(
bucket_name=TEST_BUCKET,
key_name="mp-key",
metadata={},
@@ -118,13 +122,13 @@ class TestS3FileHandleClosures(TestCase):
sse_encryption=None,
kms_key_id=None,
)
- self.s3.upload_part(
+ self.s3_client.upload_part(
bucket_name=TEST_BUCKET,
multipart_id=multipart_id,
part_id=1,
value="b" * 10_000_000,
)
- self.s3.upload_part(
+ self.s3_client.upload_part(
bucket_name=TEST_BUCKET,
multipart_id=multipart_id,
part_id=1,
@@ -133,7 +137,7 @@ class TestS3FileHandleClosures(TestCase):
@verify_zero_warnings
def test_aborting_part_upload(self):
- multipart_id = self.s3.create_multipart_upload(
+ multipart_id = self.s3_client.create_multipart_upload(
bucket_name=TEST_BUCKET,
key_name="mp-key",
metadata={},
@@ -143,19 +147,19 @@ class TestS3FileHandleClosures(TestCase):
sse_encryption=None,
kms_key_id=None,
)
- self.s3.upload_part(
+ self.s3_client.upload_part(
bucket_name=TEST_BUCKET,
multipart_id=multipart_id,
part_id=1,
value="b" * 10_000_000,
)
- self.s3.abort_multipart_upload(
+ self.s3_client.abort_multipart_upload(
bucket_name=TEST_BUCKET, multipart_id=multipart_id
)
@verify_zero_warnings
def test_completing_part_upload(self):
- multipart_id = self.s3.create_multipart_upload(
+ multipart_id = self.s3_client.create_multipart_upload(
bucket_name=TEST_BUCKET,
key_name="mp-key",
metadata={},
@@ -165,78 +169,87 @@ class TestS3FileHandleClosures(TestCase):
sse_encryption=None,
kms_key_id=None,
)
- etag = self.s3.upload_part(
+ etag = self.s3_client.upload_part(
bucket_name=TEST_BUCKET,
multipart_id=multipart_id,
part_id=1,
value="b" * 10_000_000,
).etag
-        mp_body = f"""<CompleteMultipartUpload><Part><ETag>{etag}</ETag><PartNumber>1</PartNumber></Part></CompleteMultipartUpload>"""
+        mp_body = (
+            "<CompleteMultipartUpload>"
+            f"<Part><ETag>{etag}</ETag><PartNumber>1</PartNumber></Part>"
+            "</CompleteMultipartUpload>"
+        )
body = S3ResponseInstance._complete_multipart_body(mp_body)
- self.s3.complete_multipart_upload(
+ self.s3_client.complete_multipart_upload(
bucket_name=TEST_BUCKET, multipart_id=multipart_id, body=body
)
@verify_zero_warnings
def test_single_versioned_upload(self):
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
@verify_zero_warnings
def test_overwrite_versioned_upload(self):
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
@verify_zero_warnings
def test_multiple_versions_upload(self):
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "y" * 10_000_000)
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "z" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "y" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "z" * 10_000_000)
@verify_zero_warnings
def test_delete_versioned_upload(self):
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
- self.s3.delete_object(bucket_name=TEST_BUCKET, key_name=TEST_KEY)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ self.s3_client.delete_object(bucket_name=TEST_BUCKET, key_name=TEST_KEY)
@verify_zero_warnings
def test_delete_specific_version(self):
- self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
- key = self.s3.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "y" * 10_000_000)
- self.s3.delete_object(
+ self.s3_client.put_object(TEST_BUCKET_VERSIONED, TEST_KEY, "x" * 10_000_000)
+ key = self.s3_client.put_object(
+ TEST_BUCKET_VERSIONED, TEST_KEY, "y" * 10_000_000
+ )
+ self.s3_client.delete_object(
bucket_name=TEST_BUCKET, key_name=TEST_KEY, version_id=key._version_id
)
@verify_zero_warnings
def test_reset_other_backend(self):
- db = DynamoDBBackend("us-west-1", "1234")
- # This used to empty the entire list of `model_instances`, which can contain FakeKey-references
- # Verify that we can reset an unrelated backend, without throwing away FakeKey-references that still need to be disposed
- db.reset()
+ dbase = DynamoDBBackend("us-west-1", "1234")
+ # This used to empty the entire list of `model_instances`, which
+ # can contain FakeKey-references.
+ # Verify that we can reset an unrelated backend, without throwing
+ # away FakeKey-references that still need to be disposed.
+ dbase.reset()
class TestS3FileHandleClosuresUsingMocks(TestCase):
def setUp(self) -> None:
- self.s3 = boto3.client("s3", "us-east-1")
+ self.s3_client = boto3.client("s3", "us-east-1")
@verify_zero_warnings
@mock_s3
def test_use_decorator(self):
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_object(Bucket="foo", Key="bar", Body="stuff")
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_object(Bucket="foo", Key="bar", Body="stuff")
@verify_zero_warnings
@mock_s3
def test_use_decorator_and_context_mngt(self):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_object(Bucket="foo", Key="bar", Body="stuff")
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_object(Bucket="foo", Key="bar", Body="stuff")
@verify_zero_warnings
def test_use_multiple_context_managers(self):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_object(Bucket="foo", Key="bar", Body="stuff")
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_object(Bucket="foo", Key="bar", Body="stuff")
with mock_s3():
pass
@@ -244,16 +257,16 @@ class TestS3FileHandleClosuresUsingMocks(TestCase):
@verify_zero_warnings
def test_create_multipart(self):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_object(Bucket="foo", Key="k1", Body="stuff")
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_object(Bucket="foo", Key="k1", Body="stuff")
- mp = self.s3.create_multipart_upload(Bucket="foo", Key="key2")
- self.s3.upload_part(
+ mpart = self.s3_client.create_multipart_upload(Bucket="foo", Key="key2")
+ self.s3_client.upload_part(
Body=b"hello",
PartNumber=1,
Bucket="foo",
Key="key2",
- UploadId=mp["UploadId"],
+ UploadId=mpart["UploadId"],
)
with mock_s3():
@@ -262,9 +275,9 @@ class TestS3FileHandleClosuresUsingMocks(TestCase):
@verify_zero_warnings
def test_overwrite_file(self):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_object(Bucket="foo", Key="k1", Body="stuff")
- self.s3.put_object(Bucket="foo", Key="k1", Body="b" * 10_000_000)
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_object(Bucket="foo", Key="k1", Body="stuff")
+ self.s3_client.put_object(Bucket="foo", Key="k1", Body="b" * 10_000_000)
with mock_s3():
pass
@@ -272,28 +285,30 @@ class TestS3FileHandleClosuresUsingMocks(TestCase):
@verify_zero_warnings
def test_delete_object_with_version(self):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_bucket_versioning(
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_bucket_versioning(
Bucket="foo",
VersioningConfiguration={"Status": "Enabled", "MFADelete": "Disabled"},
)
- version = self.s3.put_object(Bucket="foo", Key="b", Body="s")["VersionId"]
- self.s3.delete_object(Bucket="foo", Key="b", VersionId=version)
+ version = self.s3_client.put_object(Bucket="foo", Key="b", Body="s")[
+ "VersionId"
+ ]
+ self.s3_client.delete_object(Bucket="foo", Key="b", VersionId=version)
@verify_zero_warnings
def test_update_versioned_object__while_looping(self):
for _ in (1, 2):
with mock_s3():
- self.s3.create_bucket(Bucket="foo")
- self.s3.put_bucket_versioning(
+ self.s3_client.create_bucket(Bucket="foo")
+ self.s3_client.put_bucket_versioning(
Bucket="foo",
VersioningConfiguration={
"Status": "Enabled",
"MFADelete": "Disabled",
},
)
- self.s3.put_object(Bucket="foo", Key="bar", Body="stuff")
- self.s3.put_object(Bucket="foo", Key="bar", Body="stuff2")
+ self.s3_client.put_object(Bucket="foo", Key="bar", Body="stuff")
+ self.s3_client.put_object(Bucket="foo", Key="bar", Body="stuff2")
def test_verify_key_can_be_copied_after_disposing():
@@ -302,13 +317,15 @@ def test_verify_key_can_be_copied_after_disposing():
# - User: calls list_object_versions
# - Moto creates a list of all keys
# - User: deletes a key
- # - Moto iterates over the previously created list, that contains a now-deleted key, and creates a copy of it
+    # - Moto iterates over the previously created list, that contains
+    #   a now-deleted key, and creates a copy of it
#
- # This test verifies the copy-operation succeeds, it will just not have any data
+    # This test verifies the copy-operation succeeds; it will just not
+    # have any data.
key = s3model.FakeKey(name="test", bucket_name="bucket", value="sth")
assert not key._value_buffer.closed
key.dispose()
assert key._value_buffer.closed
copied_key = copy.copy(key)
- copied_key.value.should.equal(b"")
+ assert copied_key.value == b""
diff --git a/tests/test_s3/test_s3_lambda_integration.py b/tests/test_s3/test_s3_lambda_integration.py
index c7ba99349..e70fc6785 100644
--- a/tests/test_s3/test_s3_lambda_integration.py
+++ b/tests/test_s3/test_s3_lambda_integration.py
@@ -1,6 +1,9 @@
-import boto3
import json
+from uuid import uuid4
+
+import boto3
import pytest
+
from moto import mock_lambda, mock_logs, mock_s3, mock_sqs
from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID
from tests.markers import requires_docker
@@ -9,7 +12,6 @@ from tests.test_awslambda.utilities import (
get_role_name,
wait_for_log_msg,
)
-from uuid import uuid4
REGION_NAME = "us-east-1"
@@ -89,24 +91,20 @@ def test_objectcreated_put__invokes_lambda(match_events, actual_event):
records = [line for line in all_logs if line.startswith("{'Records'")][0]
records = json.loads(records.replace("'", '"'))["Records"]
- records.should.have.length_of(1)
- records[0].should.have.key("awsRegion").equals(REGION_NAME)
- records[0].should.have.key("eventName").equals(actual_event)
- records[0].should.have.key("eventSource").equals("aws:s3")
- records[0].should.have.key("eventTime")
- records[0].should.have.key("s3")
- records[0]["s3"].should.have.key("bucket")
- records[0]["s3"]["bucket"].should.have.key("arn").equals(
- f"arn:aws:s3:::{bucket_name}"
- )
- records[0]["s3"]["bucket"].should.have.key("name").equals(bucket_name)
- records[0]["s3"].should.have.key("configurationId").equals("s3eventtriggerslambda")
- records[0]["s3"].should.have.key("object")
- records[0]["s3"]["object"].should.have.key("eTag").equals(
- "61ea96c3c8d2c76fc5a42bfccb6affd9"
- )
- records[0]["s3"]["object"].should.have.key("key").equals("keyname")
- records[0]["s3"]["object"].should.have.key("size").equals(15)
+ assert len(records) == 1
+ assert records[0]["awsRegion"] == REGION_NAME
+ assert records[0]["eventName"] == actual_event
+ assert records[0]["eventSource"] == "aws:s3"
+ assert "eventTime" in records[0]
+ assert "s3" in records[0]
+ assert "bucket" in records[0]["s3"]
+ assert records[0]["s3"]["bucket"]["arn"] == f"arn:aws:s3:::{bucket_name}"
+ assert records[0]["s3"]["bucket"]["name"] == bucket_name
+ assert records[0]["s3"]["configurationId"] == "s3eventtriggerslambda"
+ assert "object" in records[0]["s3"]
+ assert records[0]["s3"]["object"]["eTag"] == "61ea96c3c8d2c76fc5a42bfccb6affd9"
+ assert records[0]["s3"]["object"]["key"] == "keyname"
+ assert records[0]["s3"]["object"]["size"] == 15
@mock_logs
@@ -138,8 +136,8 @@ def test_objectcreated_put__unknown_lambda_is_handled_gracefully():
# The object was persisted successfully
resp = s3_client.get_object(Bucket=bucket_name, Key="keyname")
- resp.should.have.key("ContentLength").equal(15)
- resp["Body"].read().should.equal(b"bodyofnewobject")
+ assert resp["ContentLength"] == 15
+ assert resp["Body"].read() == b"bodyofnewobject"
@mock_s3
@@ -175,12 +173,12 @@ def test_object_copy__sends_to_queue():
# We should have received a test event now
messages = sqs_client.receive_message(QueueUrl=queue_url)["Messages"]
- messages.should.have.length_of(1)
+ assert len(messages) == 1
message = json.loads(messages[0]["Body"])
- message.should.have.key("Service").equals("Amazon S3")
- message.should.have.key("Event").equals("s3:TestEvent")
- message.should.have.key("Time")
- message.should.have.key("Bucket").equals(bucket_name)
+ assert message["Service"] == "Amazon S3"
+ assert message["Event"] == "s3:TestEvent"
+ assert "Time" in message
+ assert message["Bucket"] == bucket_name
# Copy an Object
s3_client.put_object(Bucket=bucket_name, Key="keyname", Body="bodyofnewobject")
@@ -190,27 +188,23 @@ def test_object_copy__sends_to_queue():
# Read SQS messages - we should have the Copy-event here
resp = sqs_client.receive_message(QueueUrl=queue_url)
- resp.should.have.key("Messages").length_of(1)
+ assert len(resp["Messages"]) == 1
records = json.loads(resp["Messages"][0]["Body"])["Records"]
- records.should.have.length_of(1)
- records[0].should.have.key("awsRegion").equals(REGION_NAME)
- records[0].should.have.key("eventName").equals("ObjectCreated:Copy")
- records[0].should.have.key("eventSource").equals("aws:s3")
- records[0].should.have.key("eventTime")
- records[0].should.have.key("s3")
- records[0]["s3"].should.have.key("bucket")
- records[0]["s3"]["bucket"].should.have.key("arn").equals(
- f"arn:aws:s3:::{bucket_name}"
- )
- records[0]["s3"]["bucket"].should.have.key("name").equals(bucket_name)
- records[0]["s3"].should.have.key("configurationId").equals("queue_config")
- records[0]["s3"].should.have.key("object")
- records[0]["s3"]["object"].should.have.key("eTag").equals(
- "61ea96c3c8d2c76fc5a42bfccb6affd9"
- )
- records[0]["s3"]["object"].should.have.key("key").equals("key2")
- records[0]["s3"]["object"].should.have.key("size").equals(15)
+ assert len(records) == 1
+ assert records[0]["awsRegion"] == REGION_NAME
+ assert records[0]["eventName"] == "ObjectCreated:Copy"
+ assert records[0]["eventSource"] == "aws:s3"
+ assert "eventTime" in records[0]
+ assert "s3" in records[0]
+ assert "bucket" in records[0]["s3"]
+ assert records[0]["s3"]["bucket"]["arn"] == f"arn:aws:s3:::{bucket_name}"
+ assert records[0]["s3"]["bucket"]["name"] == bucket_name
+ assert records[0]["s3"]["configurationId"] == "queue_config"
+ assert "object" in records[0]["s3"]
+ assert records[0]["s3"]["object"]["eTag"] == "61ea96c3c8d2c76fc5a42bfccb6affd9"
+ assert records[0]["s3"]["object"]["key"] == "key2"
+ assert records[0]["s3"]["object"]["size"] == 15
@mock_s3
@@ -260,34 +254,34 @@ def test_object_put__sends_to_queue__using_filter():
# Read the test-event
resp = queue.receive_messages()
- [m.delete() for m in resp]
+ _ = [m.delete() for m in resp]
# Create an Object that does not meet any filter
s3_client.put_object(Bucket=bucket_name, Key="bb", Body="sth")
messages = queue.receive_messages()
- messages.should.have.length_of(0)
- [m.delete() for m in messages]
+ assert not messages
+ _ = [m.delete() for m in messages]
# Create an Object that does meet the filter - using the prefix only
s3_client.put_object(Bucket=bucket_name, Key="aafilter", Body="sth")
messages = queue.receive_messages()
- messages.should.have.length_of(1)
- [m.delete() for m in messages]
+ assert len(messages) == 1
+ _ = [m.delete() for m in messages]
# Create an Object that does meet the filter - using the prefix + suffix
s3_client.put_object(Bucket=bucket_name, Key="image/yes.jpg", Body="img")
messages = queue.receive_messages()
- messages.should.have.length_of(1)
- [m.delete() for m in messages]
+ assert len(messages) == 1
+ _ = [m.delete() for m in messages]
# Create an Object that does not meet the filter - only the prefix
s3_client.put_object(Bucket=bucket_name, Key="image/no.gif", Body="img")
messages = queue.receive_messages()
- messages.should.have.length_of(0)
- [m.delete() for m in messages]
+ assert not messages
+ _ = [m.delete() for m in messages]
# Create an Object that does not meet the filter - only the suffix
s3_client.put_object(Bucket=bucket_name, Key="nonimages/yes.jpg", Body="img")
messages = queue.receive_messages()
- messages.should.have.length_of(0)
- [m.delete() for m in messages]
+ assert not messages
+ _ = [m.delete() for m in messages]
diff --git a/tests/test_s3/test_s3_lifecycle.py b/tests/test_s3/test_s3_lifecycle.py
index 15dee4a39..19bcbea2d 100644
--- a/tests/test_s3/test_s3_lifecycle.py
+++ b/tests/test_s3/test_s3_lifecycle.py
@@ -1,8 +1,7 @@
-import boto3
-
-import sure # noqa # pylint: disable=unused-import
-from botocore.exceptions import ClientError
from datetime import datetime
+
+import boto3
+from botocore.exceptions import ClientError
import pytest
from moto import mock_s3
@@ -489,9 +488,9 @@ def test_lifecycle_with_aimu():
@mock_s3
def test_lifecycle_with_glacier_transition_boto3():
- s3 = boto3.resource("s3", region_name="us-east-1")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
client = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
client.put_bucket_lifecycle_configuration(
Bucket="foobar",
@@ -508,24 +507,24 @@ def test_lifecycle_with_glacier_transition_boto3():
)
response = client.get_bucket_lifecycle_configuration(Bucket="foobar")
- response.should.have.key("Rules")
+ assert "Rules" in response
rules = response["Rules"]
- rules.should.have.length_of(1)
- rules[0].should.have.key("ID").equal("myid")
+ assert len(rules) == 1
+ assert rules[0]["ID"] == "myid"
transition = rules[0]["Transitions"][0]
- transition["Days"].should.equal(30)
- transition["StorageClass"].should.equal("GLACIER")
- transition.shouldnt.have.key("Date")
+ assert transition["Days"] == 30
+ assert transition["StorageClass"] == "GLACIER"
+ assert "Date" not in transition
@mock_s3
def test_lifecycle_multi_boto3():
- s3 = boto3.resource("s3", region_name="us-east-1")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
client = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
date = "2022-10-12T00:00:00.000Z"
- sc = "GLACIER"
+ storage_class = "GLACIER"
client.put_bucket_lifecycle_configuration(
Bucket="foobar",
@@ -565,37 +564,36 @@ def test_lifecycle_multi_boto3():
for rule in rules:
if rule["ID"] == "1":
- rule["Prefix"].should.equal("1/")
- rule.shouldnt.have.key("Expiration")
+ assert rule["Prefix"] == "1/"
+ assert "Expiration" not in rule
elif rule["ID"] == "2":
- rule["Prefix"].should.equal("2/")
- rule["Expiration"]["Days"].should.equal(2)
+ assert rule["Prefix"] == "2/"
+ assert rule["Expiration"]["Days"] == 2
elif rule["ID"] == "3":
- rule["Prefix"].should.equal("3/")
- rule["Expiration"]["Date"].should.be.a(datetime)
- rule["Expiration"]["Date"].strftime("%Y-%m-%dT%H:%M:%S.000Z").should.equal(
- date
- )
+ assert rule["Prefix"] == "3/"
+ assert isinstance(rule["Expiration"]["Date"], datetime)
+ assert rule["Expiration"]["Date"].strftime("%Y-%m-%dT%H:%M:%S.000Z") == date
elif rule["ID"] == "4":
- rule["Prefix"].should.equal("4/")
- rule["Transitions"][0]["Days"].should.equal(4)
- rule["Transitions"][0]["StorageClass"].should.equal(sc)
+ assert rule["Prefix"] == "4/"
+ assert rule["Transitions"][0]["Days"] == 4
+ assert rule["Transitions"][0]["StorageClass"] == storage_class
elif rule["ID"] == "5":
- rule["Prefix"].should.equal("5/")
- rule["Transitions"][0]["Date"].should.be.a(datetime)
- rule["Transitions"][0]["Date"].strftime(
- "%Y-%m-%dT%H:%M:%S.000Z"
- ).should.equal(date)
- rule["Transitions"][0]["StorageClass"].should.equal(sc)
+ assert rule["Prefix"] == "5/"
+ assert isinstance(rule["Transitions"][0]["Date"], datetime)
+ assert (
+ rule["Transitions"][0]["Date"].strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ == date
+ )
+ assert rule["Transitions"][0]["StorageClass"] == storage_class
else:
assert False, "Invalid rule id"
@mock_s3
def test_lifecycle_delete_boto3():
- s3 = boto3.resource("s3", region_name="us-east-1")
+ s3_resource = boto3.resource("s3", region_name="us-east-1")
client = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
client.put_bucket_lifecycle_configuration(
Bucket="foobar",
@@ -608,7 +606,7 @@ def test_lifecycle_delete_boto3():
with pytest.raises(ClientError) as ex:
client.get_bucket_lifecycle_configuration(Bucket="foobar")
- ex.value.response["Error"]["Code"].should.equal("NoSuchLifecycleConfiguration")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "NoSuchLifecycleConfiguration"
+ assert ex.value.response["Error"]["Message"] == (
"The lifecycle configuration does not exist"
)
diff --git a/tests/test_s3/test_s3_lock.py b/tests/test_s3/test_s3_lock.py
index 2a02fd512..c7d9094f1 100644
--- a/tests/test_s3/test_s3_lock.py
+++ b/tests/test_s3/test_s3_lock.py
@@ -1,26 +1,27 @@
-import time
-import boto3
import datetime
-import pytest
-from moto import mock_s3
+import time
+
+import boto3
from botocore.config import Config
from botocore.client import ClientError
+import pytest
+
+from moto import mock_s3
from moto.s3.responses import DEFAULT_REGION_NAME
-import sure # noqa # pylint: disable=unused-import
@mock_s3
def test_locked_object():
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "locked-bucket-test"
key_name = "file.txt"
seconds_lock = 2
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)
- s3.put_object(
+ s3_client.put_object(
Bucket=bucket_name,
Body=b"test",
Key=key_name,
@@ -28,22 +29,22 @@ def test_locked_object():
ObjectLockRetainUntilDate=until,
)
- versions_response = s3.list_object_versions(Bucket=bucket_name)
+ versions_response = s3_client.list_object_versions(Bucket=bucket_name)
version_id = versions_response["Versions"][0]["VersionId"]
deleted = False
try:
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
deleted = True
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("AccessDenied")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "AccessDenied"
- deleted.should.equal(False)
+ assert deleted is False
# cleaning
time.sleep(seconds_lock)
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
- s3.delete_bucket(Bucket=bucket_name)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_bucket(Bucket=bucket_name)
@mock_s3
@@ -52,44 +53,44 @@ def test_fail_locked_object():
key_name = "file.txt"
seconds_lock = 2
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=False)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=False)
until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)
failed = False
try:
- s3.put_object(
+ s3_client.put_object(
Bucket=bucket_name,
Body=b"test",
Key=key_name,
ObjectLockMode="COMPLIANCE",
ObjectLockRetainUntilDate=until,
)
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("InvalidRequest")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "InvalidRequest"
failed = True
- failed.should.equal(True)
- s3.delete_bucket(Bucket=bucket_name)
+ assert failed is True
+ s3_client.delete_bucket(Bucket=bucket_name)
@mock_s3
def test_put_object_lock():
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "put-lock-bucket-test"
key_name = "file.txt"
seconds_lock = 2
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
- s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
- versions_response = s3.list_object_versions(Bucket=bucket_name)
+ versions_response = s3_client.list_object_versions(Bucket=bucket_name)
version_id = versions_response["Versions"][0]["VersionId"]
until = datetime.datetime.utcnow() + datetime.timedelta(0, seconds_lock)
- s3.put_object_retention(
+ s3_client.put_object_retention(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id,
@@ -98,34 +99,34 @@ def test_put_object_lock():
deleted = False
try:
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
deleted = True
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("AccessDenied")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "AccessDenied"
- deleted.should.equal(False)
+ assert deleted is False
# cleaning
time.sleep(seconds_lock)
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
- s3.delete_bucket(Bucket=bucket_name)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_bucket(Bucket=bucket_name)
@mock_s3
def test_put_object_legal_hold():
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "put-legal-bucket"
key_name = "file.txt"
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
- s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
- versions_response = s3.list_object_versions(Bucket=bucket_name)
+ versions_response = s3_client.list_object_versions(Bucket=bucket_name)
version_id = versions_response["Versions"][0]["VersionId"]
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id,
@@ -134,29 +135,29 @@ def test_put_object_legal_hold():
deleted = False
try:
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
deleted = True
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("AccessDenied")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "AccessDenied"
- deleted.should.equal(False)
+ assert deleted is False
# cleaning
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id,
LegalHold={"Status": "OFF"},
)
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
- s3.delete_bucket(Bucket=bucket_name)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_bucket(Bucket=bucket_name)
@mock_s3
def test_put_default_lock():
# do not run this test in aws, it will block the deletion for a whole day
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "put-default-lock-bucket"
key_name = "file.txt"
@@ -164,8 +165,8 @@ def test_put_default_lock():
mode = "COMPLIANCE"
enabled = "Enabled"
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
- s3.put_object_lock_configuration(
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.put_object_lock_configuration(
Bucket=bucket_name,
ObjectLockConfiguration={
"ObjectLockEnabled": enabled,
@@ -173,43 +174,43 @@ def test_put_default_lock():
},
)
- s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
deleted = False
- versions_response = s3.list_object_versions(Bucket=bucket_name)
+ versions_response = s3_client.list_object_versions(Bucket=bucket_name)
version_id = versions_response["Versions"][0]["VersionId"]
try:
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id)
deleted = True
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("AccessDenied")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "AccessDenied"
- deleted.should.equal(False)
+ assert deleted is False
- response = s3.get_object_lock_configuration(Bucket=bucket_name)
- response["ObjectLockConfiguration"]["ObjectLockEnabled"].should.equal(enabled)
- response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"][
- "Mode"
- ].should.equal(mode)
- response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"][
- "Days"
- ].should.equal(days)
+ response = s3_client.get_object_lock_configuration(Bucket=bucket_name)
+ assert response["ObjectLockConfiguration"]["ObjectLockEnabled"] == enabled
+ assert (
+ response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"]["Mode"] == mode
+ )
+ assert (
+ response["ObjectLockConfiguration"]["Rule"]["DefaultRetention"]["Days"] == days
+ )
@mock_s3
def test_put_object_legal_hold_with_versions():
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "put-legal-bucket"
key_name = "file.txt"
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
- put_obj_1 = s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ put_obj_1 = s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
version_id_1 = put_obj_1["VersionId"]
# lock the object with the version, locking the version 1
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id_1,
@@ -217,10 +218,10 @@ def test_put_object_legal_hold_with_versions():
)
# put an object on the same key, effectively creating a version 2 of the object
- put_obj_2 = s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ put_obj_2 = s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
version_id_2 = put_obj_2["VersionId"]
# also lock the version 2 of the object
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id_2,
@@ -228,13 +229,13 @@ def test_put_object_legal_hold_with_versions():
)
# assert that the version 1 is locked
- head_obj_1 = s3.head_object(
+ head_obj_1 = s3_client.head_object(
Bucket=bucket_name, Key=key_name, VersionId=version_id_1
)
assert head_obj_1["ObjectLockLegalHoldStatus"] == "ON"
# remove the lock from the version 1 of the object
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id_1,
@@ -242,41 +243,41 @@ def test_put_object_legal_hold_with_versions():
)
# assert that you can now delete the version 1 of the object
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
- with pytest.raises(ClientError) as e:
- s3.head_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
- assert e.value.response["Error"]["Code"] == "404"
+ with pytest.raises(ClientError) as exc:
+ s3_client.head_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
+ assert exc.value.response["Error"]["Code"] == "404"
# cleaning
- s3.put_object_legal_hold(
+ s3_client.put_object_legal_hold(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id_2,
LegalHold={"Status": "OFF"},
)
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
- s3.delete_bucket(Bucket=bucket_name)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
+ s3_client.delete_bucket(Bucket=bucket_name)
@mock_s3
def test_put_object_lock_with_versions():
- s3 = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
+ s3_client = boto3.client("s3", config=Config(region_name=DEFAULT_REGION_NAME))
bucket_name = "put-lock-bucket-test"
key_name = "file.txt"
seconds_lock = 2
- s3.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
+ s3_client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)
- put_obj_1 = s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ put_obj_1 = s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
version_id_1 = put_obj_1["VersionId"]
- put_obj_2 = s3.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
+ put_obj_2 = s3_client.put_object(Bucket=bucket_name, Body=b"test", Key=key_name)
version_id_2 = put_obj_2["VersionId"]
until = datetime.datetime.utcnow() + datetime.timedelta(seconds=seconds_lock)
- s3.put_object_retention(
+ s3_client.put_object_retention(
Bucket=bucket_name,
Key=key_name,
VersionId=version_id_1,
@@ -286,20 +287,22 @@ def test_put_object_lock_with_versions():
# assert that you can delete the locked version 1 of the object
deleted = False
try:
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
+ s3_client.delete_object(
+ Bucket=bucket_name, Key=key_name, VersionId=version_id_1
+ )
deleted = True
- except ClientError as e:
- e.response["Error"]["Code"].should.equal("AccessDenied")
+ except ClientError as exc:
+ assert exc.response["Error"]["Code"] == "AccessDenied"
- deleted.should.equal(False)
+ assert deleted is False
# assert that you can delete the version 2 of the object, not concerned by the lock
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
- with pytest.raises(ClientError) as e:
- s3.head_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
- assert e.value.response["Error"]["Code"] == "404"
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
+ with pytest.raises(ClientError) as exc:
+ s3_client.head_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_2)
+ assert exc.value.response["Error"]["Code"] == "404"
# cleaning
time.sleep(seconds_lock)
- s3.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
- s3.delete_bucket(Bucket=bucket_name)
+ s3_client.delete_object(Bucket=bucket_name, Key=key_name, VersionId=version_id_1)
+ s3_client.delete_bucket(Bucket=bucket_name)
diff --git a/tests/test_s3/test_s3_logging.py b/tests/test_s3/test_s3_logging.py
index 3289b59a5..456489707 100644
--- a/tests/test_s3/test_s3_logging.py
+++ b/tests/test_s3/test_s3_logging.py
@@ -1,34 +1,32 @@
import boto3
from botocore.client import ClientError
-
-from moto.s3.responses import DEFAULT_REGION_NAME
import pytest
-import sure # noqa # pylint: disable=unused-import
-
from moto import mock_s3
+from moto.s3.responses import DEFAULT_REGION_NAME
@mock_s3
def test_put_bucket_logging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
log_bucket = "logbucket"
wrong_region_bucket = "wrongregionlogbucket"
- s3.create_bucket(Bucket=bucket_name)
- s3.create_bucket(Bucket=log_bucket) # Adding the ACL for log-delivery later...
- s3.create_bucket(
+ s3_client.create_bucket(Bucket=bucket_name)
+ # Adding the ACL for log-delivery later...
+ s3_client.create_bucket(Bucket=log_bucket)
+ s3_client.create_bucket(
Bucket=wrong_region_bucket,
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)
# No logging config:
- result = s3.get_bucket_logging(Bucket=bucket_name)
+ result = s3_client.get_bucket_logging(Bucket=bucket_name)
assert not result.get("LoggingEnabled")
# A log-bucket that doesn't exist:
with pytest.raises(ClientError) as err:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {"TargetBucket": "IAMNOTREAL", "TargetPrefix": ""}
@@ -38,7 +36,7 @@ def test_put_bucket_logging():
# A log-bucket that's missing the proper ACLs for LogDelivery:
with pytest.raises(ClientError) as err:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
@@ -48,9 +46,9 @@ def test_put_bucket_logging():
assert "log-delivery" in err.value.response["Error"]["Message"]
# Add the proper "log-delivery" ACL to the log buckets:
- bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
+ bucket_owner = s3_client.get_bucket_acl(Bucket=log_bucket)["Owner"]
for bucket in [log_bucket, wrong_region_bucket]:
- s3.put_bucket_acl(
+ s3_client.put_bucket_acl(
Bucket=bucket,
AccessControlPolicy={
"Grants": [
@@ -79,7 +77,7 @@ def test_put_bucket_logging():
# A log-bucket that's in the wrong region:
with pytest.raises(ClientError) as err:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -91,7 +89,7 @@ def test_put_bucket_logging():
assert err.value.response["Error"]["Code"] == "CrossLocationLoggingProhibitted"
# Correct logging:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -100,17 +98,17 @@ def test_put_bucket_logging():
}
},
)
- result = s3.get_bucket_logging(Bucket=bucket_name)
+ result = s3_client.get_bucket_logging(Bucket=bucket_name)
assert result["LoggingEnabled"]["TargetBucket"] == log_bucket
assert result["LoggingEnabled"]["TargetPrefix"] == f"{bucket_name}/"
assert not result["LoggingEnabled"].get("TargetGrants")
# And disabling:
- s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
- assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")
+ s3_client.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
+ assert not s3_client.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")
# And enabling with multiple target grants:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -136,7 +134,7 @@ def test_put_bucket_logging():
},
)
- result = s3.get_bucket_logging(Bucket=bucket_name)
+ result = s3_client.get_bucket_logging(Bucket=bucket_name)
assert len(result["LoggingEnabled"]["TargetGrants"]) == 2
assert (
result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"]
@@ -144,7 +142,7 @@ def test_put_bucket_logging():
)
# Test with just 1 grant:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -162,12 +160,12 @@ def test_put_bucket_logging():
}
},
)
- result = s3.get_bucket_logging(Bucket=bucket_name)
+ result = s3_client.get_bucket_logging(Bucket=bucket_name)
assert len(result["LoggingEnabled"]["TargetGrants"]) == 1
# With an invalid grant:
with pytest.raises(ClientError) as err:
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -176,7 +174,10 @@ def test_put_bucket_logging():
"TargetGrants": [
{
"Grantee": {
- "ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
+ "ID": (
+ "SOMEIDSTRINGHERE9238748923734823917498"
+ "237489237409123840983274"
+ ),
"Type": "CanonicalUser",
},
"Permission": "NOTAREALPERM",
@@ -191,15 +192,15 @@ def test_put_bucket_logging():
@mock_s3
def test_log_file_is_created():
# Create necessary buckets
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
log_bucket = "logbucket"
- s3.create_bucket(Bucket=bucket_name)
- s3.create_bucket(Bucket=log_bucket)
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=log_bucket)
# Enable logging
- bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
- s3.put_bucket_acl(
+ bucket_owner = s3_client.get_bucket_acl(Bucket=log_bucket)["Owner"]
+ s3_client.put_bucket_acl(
Bucket=log_bucket,
AccessControlPolicy={
"Grants": [
@@ -225,7 +226,7 @@ def test_log_file_is_created():
"Owner": bucket_owner,
},
)
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {
@@ -236,27 +237,27 @@ def test_log_file_is_created():
)
# Make some requests against the source bucket
- s3.put_object(Bucket=bucket_name, Key="key1", Body=b"")
- s3.put_object(Bucket=bucket_name, Key="key2", Body=b"data")
+ s3_client.put_object(Bucket=bucket_name, Key="key1", Body=b"")
+ s3_client.put_object(Bucket=bucket_name, Key="key2", Body=b"data")
- s3.put_bucket_logging(
+ s3_client.put_bucket_logging(
Bucket=bucket_name,
BucketLoggingStatus={
"LoggingEnabled": {"TargetBucket": log_bucket, "TargetPrefix": ""}
},
)
- s3.list_objects_v2(Bucket=bucket_name)
+ s3_client.list_objects_v2(Bucket=bucket_name)
# Verify files are created in the target (logging) bucket
- keys = [k["Key"] for k in s3.list_objects_v2(Bucket=log_bucket)["Contents"]]
- [k for k in keys if k.startswith("mybucket/")].should.have.length_of(3)
- [k for k in keys if not k.startswith("mybucket/")].should.have.length_of(1)
+ keys = [k["Key"] for k in s3_client.list_objects_v2(Bucket=log_bucket)["Contents"]]
+ assert len([k for k in keys if k.startswith("mybucket/")]) == 3
+ assert len([k for k in keys if not k.startswith("mybucket/")]) == 1
# Verify (roughly) files have the correct content
contents = [
- s3.get_object(Bucket=log_bucket, Key=key)["Body"].read().decode("utf-8")
+ s3_client.get_object(Bucket=log_bucket, Key=key)["Body"].read().decode("utf-8")
for key in keys
]
- assert any([c for c in contents if bucket_name in c])
- assert any([c for c in contents if "REST.GET.BUCKET" in c])
- assert any([c for c in contents if "REST.PUT.BUCKET" in c])
+ assert any(c for c in contents if bucket_name in c)
+ assert any(c for c in contents if "REST.GET.BUCKET" in c)
+ assert any(c for c in contents if "REST.PUT.BUCKET" in c)
diff --git a/tests/test_s3/test_s3_metadata.py b/tests/test_s3/test_s3_metadata.py
index 3f5c521b9..0cfedd0ef 100644
--- a/tests/test_s3/test_s3_metadata.py
+++ b/tests/test_s3/test_s3_metadata.py
@@ -3,25 +3,23 @@ import boto3
from moto import mock_s3
from moto.s3.responses import DEFAULT_REGION_NAME
-import sure # noqa # pylint: disable=unused-import
-
@mock_s3
def test_s3_returns_requestid():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- resp = s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ resp = s3_client.create_bucket(Bucket="mybucket")
_check_metadata(resp)
- resp = s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
+ resp = s3_client.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
_check_metadata(resp)
- resp = s3.get_object(Bucket="mybucket", Key="steve")
+ resp = s3_client.get_object(Bucket="mybucket", Key="steve")
_check_metadata(resp)
def _check_metadata(resp):
meta = resp["ResponseMetadata"]
headers = meta["HTTPHeaders"]
- meta.should.have.key("RequestId")
- headers.should.have.key("x-amzn-requestid")
- meta["RequestId"].should.equal(headers["x-amzn-requestid"])
+ assert "RequestId" in meta
+ assert "x-amzn-requestid" in headers
+ assert meta["RequestId"] == headers["x-amzn-requestid"]
diff --git a/tests/test_s3/test_s3_multipart.py b/tests/test_s3/test_s3_multipart.py
index fb511e76a..8cfe6e89d 100644
--- a/tests/test_s3/test_s3_multipart.py
+++ b/tests/test_s3/test_s3_multipart.py
@@ -1,18 +1,16 @@
-import boto3
-import os
-import pytest
-import sure # noqa # pylint: disable=unused-import
-import requests
-
-from botocore.client import ClientError
from functools import wraps
from io import BytesIO
+import os
+import re
-from moto.s3.responses import DEFAULT_REGION_NAME
-
+import boto3
+from botocore.client import ClientError
+import pytest
+import requests
from moto import settings, mock_s3
import moto.s3.models as s3model
+from moto.s3.responses import DEFAULT_REGION_NAME
from moto.settings import get_s3_default_key_buffer_size, S3_UPLOAD_PART_MIN_SIZE
if settings.TEST_SERVER_MODE:
@@ -23,17 +21,15 @@ else:
EXPECTED_ETAG = '"66d1a1a2ed08fd05c137f316af4ff255-2"'
-def reduced_min_part_size(f):
- """speed up tests by temporarily making the multipart minimum part size
- small
- """
+def reduced_min_part_size(func):
+ """Speed up tests by temporarily making multipart min. part size small."""
orig_size = S3_UPLOAD_PART_MIN_SIZE
- @wraps(f)
+ @wraps(func)
def wrapped(*args, **kwargs):
try:
s3model.S3_UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE
- return f(*args, **kwargs)
+ return func(*args, **kwargs)
finally:
s3model.S3_UPLOAD_PART_MIN_SIZE = orig_size
@@ -49,16 +45,17 @@ def test_default_key_buffer_size():
os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "2" # 2 bytes
assert get_s3_default_key_buffer_size() == 2
- fk = s3model.FakeKey("a", os.urandom(1)) # 1 byte string
- assert fk._value_buffer._rolled is False
+ fake_key = s3model.FakeKey("a", os.urandom(1)) # 1 byte string
+ assert fake_key._value_buffer._rolled is False
os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "1" # 1 byte
assert get_s3_default_key_buffer_size() == 1
- fk = s3model.FakeKey("a", os.urandom(3)) # 3 byte string
- assert fk._value_buffer._rolled is True
+ fake_key = s3model.FakeKey("a", os.urandom(3)) # 3 byte string
+ assert fake_key._value_buffer._rolled is True
- # if no MOTO_S3_DEFAULT_KEY_BUFFER_SIZE env variable is present the buffer size should be less than
- # S3_UPLOAD_PART_MIN_SIZE to prevent in memory caching of multi part uploads
+ # if no MOTO_S3_DEFAULT_KEY_BUFFER_SIZE env variable is present the
+ # buffer size should be less than S3_UPLOAD_PART_MIN_SIZE to prevent
+ # in memory caching of multi part uploads.
del os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"]
assert get_s3_default_key_buffer_size() < S3_UPLOAD_PART_MIN_SIZE
@@ -69,24 +66,24 @@ def test_default_key_buffer_size():
@mock_s3
def test_multipart_upload_too_small():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
- mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
+ multipart = client.create_multipart_upload(Bucket="foobar", Key="the-key")
up1 = client.upload_part(
Body=BytesIO(b"hello"),
PartNumber=1,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
up2 = client.upload_part(
Body=BytesIO(b"world"),
PartNumber=2,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# Multipart with total size under 5MB is refused
with pytest.raises(ClientError) as ex:
@@ -99,10 +96,10 @@ def test_multipart_upload_too_small():
{"ETag": up2["ETag"], "PartNumber": 2},
]
},
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
- ex.value.response["Error"]["Code"].should.equal("EntityTooSmall")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "EntityTooSmall"
+ assert ex.value.response["Error"]["Message"] == (
"Your proposed upload is smaller than the minimum allowed object size."
)
@@ -111,26 +108,26 @@ def test_multipart_upload_too_small():
@mock_s3
@reduced_min_part_size
def test_multipart_upload(key: str):
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
part1 = b"0" * REDUCED_PART_SIZE
part2 = b"1"
- mp = client.create_multipart_upload(Bucket="foobar", Key=key)
+ multipart = client.create_multipart_upload(Bucket="foobar", Key=key)
up1 = client.upload_part(
Body=BytesIO(part1),
PartNumber=1,
Bucket="foobar",
Key=key,
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
up2 = client.upload_part(
Body=BytesIO(part2),
PartNumber=2,
Bucket="foobar",
Key=key,
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
client.complete_multipart_upload(
@@ -142,36 +139,36 @@ def test_multipart_upload(key: str):
{"ETag": up2["ETag"], "PartNumber": 2},
]
},
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# we should get both parts as the key contents
response = client.get_object(Bucket="foobar", Key=key)
- response["Body"].read().should.equal(part1 + part2)
+ assert response["Body"].read() == part1 + part2
@mock_s3
@reduced_min_part_size
def test_multipart_upload_out_of_order():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
part1 = b"0" * REDUCED_PART_SIZE
part2 = b"1"
- mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
+ multipart = client.create_multipart_upload(Bucket="foobar", Key="the-key")
up1 = client.upload_part(
Body=BytesIO(part1),
PartNumber=4,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
up2 = client.upload_part(
Body=BytesIO(part2),
PartNumber=2,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
client.complete_multipart_upload(
@@ -183,24 +180,24 @@ def test_multipart_upload_out_of_order():
{"ETag": up2["ETag"], "PartNumber": 2},
]
},
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# we should get both parts as the key contents
response = client.get_object(Bucket="foobar", Key="the-key")
- response["Body"].read().should.equal(part1 + part2)
+ assert response["Body"].read() == part1 + part2
@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_headers():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "fancymultiparttest"
key_name = "the-key"
- s3.create_bucket(Bucket=bucket_name)
+ s3_resource.create_bucket(Bucket=bucket_name)
part1 = b"0" * REDUCED_PART_SIZE
- mp = client.create_multipart_upload(
+ multipart = client.create_multipart_upload(
Bucket=bucket_name,
Key=key_name,
Metadata={"meta": "data"},
@@ -212,31 +209,29 @@ def test_multipart_upload_with_headers():
PartNumber=1,
Bucket=bucket_name,
Key=key_name,
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
client.complete_multipart_upload(
Bucket=bucket_name,
Key=key_name,
MultipartUpload={"Parts": [{"ETag": up1["ETag"], "PartNumber": 1}]},
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# we should get both parts as the key contents
response = client.get_object(Bucket=bucket_name, Key=key_name)
- response["Metadata"].should.equal({"meta": "data"})
- response["StorageClass"].should.equal("STANDARD_IA")
+ assert response["Metadata"] == {"meta": "data"}
+ assert response["StorageClass"] == "STANDARD_IA"
grants = client.get_object_acl(Bucket=bucket_name, Key=key_name)["Grants"]
- grants.should.have.length_of(2)
- grants.should.contain(
- {
- "Grantee": {
- "Type": "Group",
- "URI": "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
- },
- "Permission": "READ",
- }
- )
+ assert len(grants) == 2
+ assert {
+ "Grantee": {
+ "Type": "Group",
+ "URI": "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
+ },
+ "Permission": "READ",
+ } in grants
@pytest.mark.parametrize(
@@ -251,20 +246,20 @@ def test_multipart_upload_with_headers():
@mock_s3
@reduced_min_part_size
def test_multipart_upload_with_copy_key(original_key_name):
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
- s3.put_object(Bucket="foobar", Key=original_key_name, Body="key_value")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="foobar")
+ s3_client.put_object(Bucket="foobar", Key=original_key_name, Body="key_value")
- mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket="foobar", Key="the-key")
part1 = b"0" * REDUCED_PART_SIZE
- up1 = s3.upload_part(
+ up1 = s3_client.upload_part(
Bucket="foobar",
Key="the-key",
PartNumber=1,
UploadId=mpu["UploadId"],
Body=BytesIO(part1),
)
- up2 = s3.upload_part_copy(
+ up2 = s3_client.upload_part_copy(
Bucket="foobar",
Key="the-key",
CopySource={"Bucket": "foobar", "Key": original_key_name},
@@ -272,7 +267,7 @@ def test_multipart_upload_with_copy_key(original_key_name):
PartNumber=2,
UploadId=mpu["UploadId"],
)
- s3.complete_multipart_upload(
+ s3_client.complete_multipart_upload(
Bucket="foobar",
Key="the-key",
MultipartUpload={
@@ -283,19 +278,19 @@ def test_multipart_upload_with_copy_key(original_key_name):
},
UploadId=mpu["UploadId"],
)
- response = s3.get_object(Bucket="foobar", Key="the-key")
- response["Body"].read().should.equal(part1 + b"key_")
+ response = s3_client.get_object(Bucket="foobar", Key="the-key")
+ assert response["Body"].read() == part1 + b"key_"
@mock_s3
@reduced_min_part_size
def test_multipart_upload_cancel():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="foobar")
- mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket="foobar", Key="the-key")
part1 = b"0" * REDUCED_PART_SIZE
- s3.upload_part(
+ s3_client.upload_part(
Bucket="foobar",
Key="the-key",
PartNumber=1,
@@ -303,25 +298,27 @@ def test_multipart_upload_cancel():
Body=BytesIO(part1),
)
- uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
- uploads.should.have.length_of(1)
- uploads[0]["Key"].should.equal("the-key")
+ uploads = s3_client.list_multipart_uploads(Bucket="foobar")["Uploads"]
+ assert len(uploads) == 1
+ assert uploads[0]["Key"] == "the-key"
- s3.abort_multipart_upload(Bucket="foobar", Key="the-key", UploadId=mpu["UploadId"])
+ s3_client.abort_multipart_upload(
+ Bucket="foobar", Key="the-key", UploadId=mpu["UploadId"]
+ )
- s3.list_multipart_uploads(Bucket="foobar").shouldnt.have.key("Uploads")
+ assert "Uploads" not in s3_client.list_multipart_uploads(Bucket="foobar")
@mock_s3
@reduced_min_part_size
def test_multipart_etag_quotes_stripped():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
- s3.put_object(Bucket="foobar", Key="original-key", Body="key_value")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="foobar")
+ s3_client.put_object(Bucket="foobar", Key="original-key", Body="key_value")
- mpu = s3.create_multipart_upload(Bucket="foobar", Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket="foobar", Key="the-key")
part1 = b"0" * REDUCED_PART_SIZE
- up1 = s3.upload_part(
+ up1 = s3_client.upload_part(
Bucket="foobar",
Key="the-key",
PartNumber=1,
@@ -329,7 +326,7 @@ def test_multipart_etag_quotes_stripped():
Body=BytesIO(part1),
)
etag1 = up1["ETag"].replace('"', "")
- up2 = s3.upload_part_copy(
+ up2 = s3_client.upload_part_copy(
Bucket="foobar",
Key="the-key",
CopySource={"Bucket": "foobar", "Key": "original-key"},
@@ -338,7 +335,7 @@ def test_multipart_etag_quotes_stripped():
UploadId=mpu["UploadId"],
)
etag2 = up2["CopyPartResult"]["ETag"].replace('"', "")
- s3.complete_multipart_upload(
+ s3_client.complete_multipart_upload(
Bucket="foobar",
Key="the-key",
MultipartUpload={
@@ -349,26 +346,26 @@ def test_multipart_etag_quotes_stripped():
},
UploadId=mpu["UploadId"],
)
- response = s3.get_object(Bucket="foobar", Key="the-key")
- response["Body"].read().should.equal(part1 + b"key_")
+ response = s3_client.get_object(Bucket="foobar", Key="the-key")
+ assert response["Body"].read() == part1 + b"key_"
@mock_s3
@reduced_min_part_size
def test_multipart_duplicate_upload():
- s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
+ s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_resource.create_bucket(Bucket="foobar")
part1 = b"0" * REDUCED_PART_SIZE
part2 = b"1"
- mp = client.create_multipart_upload(Bucket="foobar", Key="the-key")
+ multipart = client.create_multipart_upload(Bucket="foobar", Key="the-key")
client.upload_part(
Body=BytesIO(part1),
PartNumber=1,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# same part again
up1 = client.upload_part(
@@ -376,14 +373,14 @@ def test_multipart_duplicate_upload():
PartNumber=1,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
up2 = client.upload_part(
Body=BytesIO(part2),
PartNumber=2,
Bucket="foobar",
Key="the-key",
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
client.complete_multipart_upload(
@@ -395,37 +392,41 @@ def test_multipart_duplicate_upload():
{"ETag": up2["ETag"], "PartNumber": 2},
]
},
- UploadId=mp["UploadId"],
+ UploadId=multipart["UploadId"],
)
# we should get both parts as the key contents
response = client.get_object(Bucket="foobar", Key="the-key")
- response["Body"].read().should.equal(part1 + part2)
+ assert response["Body"].read() == part1 + part2
@mock_s3
def test_list_multiparts():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="foobar")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="foobar")
- mpu1 = s3.create_multipart_upload(Bucket="foobar", Key="one-key")
- mpu2 = s3.create_multipart_upload(Bucket="foobar", Key="two-key")
+ mpu1 = s3_client.create_multipart_upload(Bucket="foobar", Key="one-key")
+ mpu2 = s3_client.create_multipart_upload(Bucket="foobar", Key="two-key")
- uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
- uploads.should.have.length_of(2)
- {u["Key"]: u["UploadId"] for u in uploads}.should.equal(
+ uploads = s3_client.list_multipart_uploads(Bucket="foobar")["Uploads"]
+ assert len(uploads) == 2
+ assert {u["Key"]: u["UploadId"] for u in uploads} == (
{"one-key": mpu1["UploadId"], "two-key": mpu2["UploadId"]}
)
- s3.abort_multipart_upload(Bucket="foobar", Key="the-key", UploadId=mpu2["UploadId"])
+ s3_client.abort_multipart_upload(
+ Bucket="foobar", Key="the-key", UploadId=mpu2["UploadId"]
+ )
- uploads = s3.list_multipart_uploads(Bucket="foobar")["Uploads"]
- uploads.should.have.length_of(1)
- uploads[0]["Key"].should.equal("one-key")
+ uploads = s3_client.list_multipart_uploads(Bucket="foobar")["Uploads"]
+ assert len(uploads) == 1
+ assert uploads[0]["Key"] == "one-key"
- s3.abort_multipart_upload(Bucket="foobar", Key="the-key", UploadId=mpu1["UploadId"])
+ s3_client.abort_multipart_upload(
+ Bucket="foobar", Key="the-key", UploadId=mpu1["UploadId"]
+ )
- res = s3.list_multipart_uploads(Bucket="foobar")
- res.shouldnt.have.key("Uploads")
+ res = s3_client.list_multipart_uploads(Bucket="foobar")
+ assert "Uploads" not in res
@mock_s3
@@ -442,25 +443,26 @@ def test_multipart_should_throw_nosuchupload_if_there_are_no_parts():
with pytest.raises(ClientError) as ex:
list(multipart_upload.parts.all())
err = ex.value.response["Error"]
- err["Code"].should.equal("NoSuchUpload")
- err["Message"].should.equal(
- "The specified upload does not exist. The upload ID may be invalid, or the upload may have been aborted or completed."
+ assert err["Code"] == "NoSuchUpload"
+ assert err["Message"] == (
+ "The specified upload does not exist. The upload ID may be invalid, "
+ "or the upload may have been aborted or completed."
)
- err["UploadId"].should.equal(multipart_upload.id)
+ assert err["UploadId"] == multipart_upload.id
@mock_s3
def test_multipart_wrong_partnumber():
bucket_name = "mputest-3593"
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
- mpu = s3.create_multipart_upload(Bucket=bucket_name, Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key="the-key")
mpu_id = mpu["UploadId"]
body = b"111"
with pytest.raises(ClientError) as ex:
- s3.upload_part(
+ s3_client.upload_part(
Bucket=bucket_name,
Key="the-key",
PartNumber=-1,
@@ -469,9 +471,10 @@ def test_multipart_wrong_partnumber():
ContentLength=len(body),
)
err = ex.value.response["Error"]
- err["Code"].should.equal("NoSuchUpload")
- err["Message"].should.equal(
- "The specified upload does not exist. The upload ID may be invalid, or the upload may have been aborted or completed."
+ assert err["Code"] == "NoSuchUpload"
+ assert err["Message"] == (
+ "The specified upload does not exist. The upload ID may be invalid, "
+ "or the upload may have been aborted or completed."
)
@@ -485,21 +488,21 @@ def test_multipart_upload_with_tags():
client.create_bucket(Bucket=bucket)
response = client.create_multipart_upload(Bucket=bucket, Key=key, Tagging=tags)
- u = boto3.resource("s3").MultipartUpload(bucket, key, response["UploadId"])
+ upload = boto3.resource("s3").MultipartUpload(bucket, key, response["UploadId"])
parts = [
{
- "ETag": u.Part(i).upload(Body=os.urandom(5 * (2**20)))["ETag"],
+ "ETag": upload.Part(i).upload(Body=os.urandom(5 * (2**20)))["ETag"],
"PartNumber": i,
}
for i in range(1, 3)
]
- u.complete(MultipartUpload={"Parts": parts})
+ upload.complete(MultipartUpload={"Parts": parts})
# check tags
response = client.get_object_tagging(Bucket=bucket, Key=key)
actual = {t["Key"]: t["Value"] for t in response.get("TagSet", [])}
- actual.should.equal({"a": "b"})
+ assert actual == {"a": "b"}
@mock_s3
@@ -524,7 +527,7 @@ def test_multipart_upload_should_return_part_10000():
all_parts = s3_client.list_parts(Bucket=bucket, Key=key, UploadId=mpu_id)["Parts"]
part_nrs = [part["PartNumber"] for part in all_parts]
- part_nrs.should.equal([1, 2, 10000])
+ assert part_nrs == [1, 2, 10000]
@mock_s3
@@ -539,7 +542,7 @@ def test_multipart_upload_without_parts():
mpu_id = mpu["UploadId"]
list_parts_result = s3_client.list_parts(Bucket=bucket, Key=key, UploadId=mpu_id)
- list_parts_result["IsTruncated"].should.equal(False)
+ assert list_parts_result["IsTruncated"] is False
@mock_s3
@@ -559,12 +562,12 @@ def test_s3_multipart_upload_cannot_upload_part_over_10000(part_nr):
Bucket=bucket, Key=key, PartNumber=part_nr, UploadId=mpu_id, Body="data"
)
err = exc.value.response["Error"]
- err["Code"].should.equal("InvalidArgument")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidArgument"
+ assert err["Message"] == (
"Part number must be an integer between 1 and 10000, inclusive"
)
- err["ArgumentName"].should.equal("partNumber")
- err["ArgumentValue"].should.equal(f"{part_nr}")
+ assert err["ArgumentName"] == "partNumber"
+ assert err["ArgumentValue"] == f"{part_nr}"
@mock_s3
@@ -578,25 +581,28 @@ def test_s3_abort_multipart_data_with_invalid_upload_and_key():
Bucket="blah", Key="foobar", UploadId="dummy_upload_id"
)
err = err.value.response["Error"]
- err["Code"].should.equal("NoSuchUpload")
- err["Message"].should.equal(
- "The specified upload does not exist. The upload ID may be invalid, or the upload may have been aborted or completed."
+ assert err["Code"] == "NoSuchUpload"
+ assert err["Message"] == (
+ "The specified upload does not exist. The upload ID may be invalid, "
+ "or the upload may have been aborted or completed."
)
- err["UploadId"].should.equal("dummy_upload_id")
+ assert err["UploadId"] == "dummy_upload_id"
@mock_s3
@reduced_min_part_size
def test_multipart_etag():
# Create Bucket so that test can run
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"]
+ upload_id = s3_client.create_multipart_upload(Bucket="mybucket", Key="the-key")[
+ "UploadId"
+ ]
part1 = b"0" * REDUCED_PART_SIZE
etags = []
etags.append(
- s3.upload_part(
+ s3_client.upload_part(
Bucket="mybucket",
Key="the-key",
PartNumber=1,
@@ -607,7 +613,7 @@ def test_multipart_etag():
# last part, can be less than 5 MB
part2 = b"1"
etags.append(
- s3.upload_part(
+ s3_client.upload_part(
Bucket="mybucket",
Key="the-key",
PartNumber=2,
@@ -616,7 +622,7 @@ def test_multipart_etag():
)["ETag"]
)
- s3.complete_multipart_upload(
+ s3_client.complete_multipart_upload(
Bucket="mybucket",
Key="the-key",
UploadId=upload_id,
@@ -627,26 +633,28 @@ def test_multipart_etag():
},
)
# we should get both parts as the key contents
- resp = s3.get_object(Bucket="mybucket", Key="the-key")
- resp["ETag"].should.equal(EXPECTED_ETAG)
+ resp = s3_client.get_object(Bucket="mybucket", Key="the-key")
+ assert resp["ETag"] == EXPECTED_ETAG
@mock_s3
@reduced_min_part_size
def test_multipart_version():
# Create Bucket so that test can run
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- s3.put_bucket_versioning(
+ s3_client.put_bucket_versioning(
Bucket="mybucket", VersioningConfiguration={"Status": "Enabled"}
)
- upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"]
+ upload_id = s3_client.create_multipart_upload(Bucket="mybucket", Key="the-key")[
+ "UploadId"
+ ]
part1 = b"0" * REDUCED_PART_SIZE
etags = []
etags.append(
- s3.upload_part(
+ s3_client.upload_part(
Bucket="mybucket",
Key="the-key",
PartNumber=1,
@@ -657,7 +665,7 @@ def test_multipart_version():
# last part, can be less than 5 MB
part2 = b"1"
etags.append(
- s3.upload_part(
+ s3_client.upload_part(
Bucket="mybucket",
Key="the-key",
PartNumber=2,
@@ -665,7 +673,7 @@ def test_multipart_version():
Body=part2,
)["ETag"]
)
- response = s3.complete_multipart_upload(
+ response = s3_client.complete_multipart_upload(
Bucket="mybucket",
Key="the-key",
UploadId=upload_id,
@@ -676,7 +684,7 @@ def test_multipart_version():
},
)
- response["VersionId"].should.match("[-a-z0-9]+")
+ assert re.match("[-a-z0-9]+", response["VersionId"])
@mock_s3
@@ -696,37 +704,39 @@ def test_multipart_version():
],
)
def test_multipart_list_parts_invalid_argument(part_nr, msg, msg2):
- s3 = boto3.client("s3", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucketasdfljoqwerasdfas"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- mpu = s3.create_multipart_upload(Bucket=bucket_name, Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key="the-key")
mpu_id = mpu["UploadId"]
def get_parts(**kwarg):
- s3.list_parts(Bucket=bucket_name, Key="the-key", UploadId=mpu_id, **kwarg)
+ s3_client.list_parts(
+ Bucket=bucket_name, Key="the-key", UploadId=mpu_id, **kwarg
+ )
with pytest.raises(ClientError) as err:
get_parts(**{"MaxParts": part_nr})
- e = err.value.response["Error"]
- e["Code"].should.equal("InvalidArgument")
- e["Message"].should.equal(msg)
+ err_rsp = err.value.response["Error"]
+ assert err_rsp["Code"] == "InvalidArgument"
+ assert err_rsp["Message"] == msg
with pytest.raises(ClientError) as err:
get_parts(**{"PartNumberMarker": part_nr})
- e = err.value.response["Error"]
- e["Code"].should.equal("InvalidArgument")
- e["Message"].should.equal(msg2)
+ err_rsp = err.value.response["Error"]
+ assert err_rsp["Code"] == "InvalidArgument"
+ assert err_rsp["Message"] == msg2
@mock_s3
@reduced_min_part_size
def test_multipart_list_parts():
- s3 = boto3.client("s3", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucketasdfljoqwerasdfas"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- mpu = s3.create_multipart_upload(Bucket=bucket_name, Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key="the-key")
mpu_id = mpu["UploadId"]
parts = []
@@ -736,7 +746,9 @@ def test_multipart_list_parts():
# Get uploaded parts using default values
uploaded_parts = []
- uploaded = s3.list_parts(Bucket=bucket_name, Key="the-key", UploadId=mpu_id)
+ uploaded = s3_client.list_parts(
+ Bucket=bucket_name, Key="the-key", UploadId=mpu_id
+ )
assert uploaded["PartNumberMarker"] == 0
@@ -762,7 +774,7 @@ def test_multipart_list_parts():
uploaded_parts = []
while "there are parts":
- uploaded = s3.list_parts(
+ uploaded = s3_client.list_parts(
Bucket=bucket_name,
Key="the-key",
UploadId=mpu_id,
@@ -800,7 +812,7 @@ def test_multipart_list_parts():
for i in range(1, n_parts + 1):
part_size = REDUCED_PART_SIZE + i
body = b"1" * part_size
- part = s3.upload_part(
+ part = s3_client.upload_part(
Bucket=bucket_name,
Key="the-key",
PartNumber=i,
@@ -818,7 +830,7 @@ def test_multipart_list_parts():
get_parts_all(11)
get_parts_by_batch(11)
- s3.complete_multipart_upload(
+ s3_client.complete_multipart_upload(
Bucket=bucket_name,
Key="the-key",
UploadId=mpu_id,
@@ -829,10 +841,10 @@ def test_multipart_list_parts():
@mock_s3
@reduced_min_part_size
def test_multipart_part_size():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket="mybucket")
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket="mybucket")
- mpu = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")
+ mpu = s3_client.create_multipart_upload(Bucket="mybucket", Key="the-key")
mpu_id = mpu["UploadId"]
parts = []
@@ -840,7 +852,7 @@ def test_multipart_part_size():
for i in range(1, n_parts + 1):
part_size = REDUCED_PART_SIZE + i
body = b"1" * part_size
- part = s3.upload_part(
+ part = s3_client.upload_part(
Bucket="mybucket",
Key="the-key",
PartNumber=i,
@@ -850,7 +862,7 @@ def test_multipart_part_size():
)
parts.append({"PartNumber": i, "ETag": part["ETag"]})
- s3.complete_multipart_upload(
+ s3_client.complete_multipart_upload(
Bucket="mybucket",
Key="the-key",
UploadId=mpu_id,
@@ -858,17 +870,13 @@ def test_multipart_part_size():
)
for i in range(1, n_parts + 1):
- obj = s3.head_object(Bucket="mybucket", Key="the-key", PartNumber=i)
+ obj = s3_client.head_object(Bucket="mybucket", Key="the-key", PartNumber=i)
assert obj["ContentLength"] == REDUCED_PART_SIZE + i
@mock_s3
def test_complete_multipart_with_empty_partlist():
- """
- When completing a MultipartUpload with an empty part list, AWS responds with an InvalidXML-error
-
- Verify that we send the same error, to duplicate boto3's behaviour
- """
+ """Verify InvalidXML-error sent for MultipartUpload with empty part list."""
bucket = "testbucketthatcompletesmultipartuploadwithoutparts"
key = "test-multi-empty"
@@ -883,9 +891,10 @@ def test_complete_multipart_with_empty_partlist():
with pytest.raises(ClientError) as exc:
upload.complete(MultipartUpload={"Parts": []})
err = exc.value.response["Error"]
- err["Code"].should.equal("MalformedXML")
- err["Message"].should.equal(
- "The XML you provided was not well-formed or did not validate against our published schema"
+ assert err["Code"] == "MalformedXML"
+ assert err["Message"] == (
+ "The XML you provided was not well-formed or did not validate "
+ "against our published schema"
)
@@ -969,7 +978,7 @@ def test_generate_presigned_url_on_multipart_upload_without_acl():
"head_object", Params={"Bucket": bucket_name, "Key": object_key}
)
res = requests.get(url)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
@mock_s3
@@ -986,16 +995,16 @@ def test_head_object_returns_part_count():
num_parts = 2
parts = []
- for p in range(1, num_parts + 1):
+ for part in range(1, num_parts + 1):
response = client.upload_part(
- Body=b"x" * (REDUCED_PART_SIZE + p),
+ Body=b"x" * (REDUCED_PART_SIZE + part),
Bucket=bucket,
Key=key,
- PartNumber=p,
+ PartNumber=part,
UploadId=mp_id,
)
- parts.append({"ETag": response["ETag"], "PartNumber": p})
+ parts.append({"ETag": response["ETag"], "PartNumber": part})
client.complete_multipart_upload(
Bucket=bucket,
@@ -1005,8 +1014,8 @@ def test_head_object_returns_part_count():
)
resp = client.head_object(Bucket=bucket, Key=key, PartNumber=1)
- resp.should.have.key("PartsCount").equals(num_parts)
+ assert resp["PartsCount"] == num_parts
# Header is not returned when we do not pass PartNumber
resp = client.head_object(Bucket=bucket, Key=key)
- resp.shouldnt.have.key("PartsCount")
+ assert "PartsCount" not in resp
diff --git a/tests/test_s3/test_s3_object_attributes.py b/tests/test_s3/test_s3_object_attributes.py
index d53a64d61..b2d18528b 100644
--- a/tests/test_s3/test_s3_object_attributes.py
+++ b/tests/test_s3/test_s3_object_attributes.py
@@ -1,16 +1,18 @@
+from uuid import uuid4
+
import boto3
import pytest
+
from moto import mock_s3
-from uuid import uuid4
@mock_s3
class TestS3ObjectAttributes:
def setup_method(self, *args) -> None: # pylint: disable=unused-argument
self.bucket_name = str(uuid4())
- self.s3 = boto3.resource("s3", region_name="us-east-1")
+ self.s3_resource = boto3.resource("s3", region_name="us-east-1")
self.client = boto3.client("s3", region_name="us-east-1")
- self.bucket = self.s3.Bucket(self.bucket_name)
+ self.bucket = self.s3_resource.Bucket(self.bucket_name)
self.bucket.create()
self.key = self.bucket.put_object(Key="mykey", Body=b"somedata")
diff --git a/tests/test_s3/test_s3_ownership.py b/tests/test_s3/test_s3_ownership.py
index bc68eb892..05840aeb0 100644
--- a/tests/test_s3/test_s3_ownership.py
+++ b/tests/test_s3/test_s3_ownership.py
@@ -1,8 +1,7 @@
import boto3
from botocore.client import ClientError
-
import pytest
-import sure # noqa # pylint: disable=unused-import
+
from moto import mock_s3
@@ -14,7 +13,7 @@ def test_create_bucket_with_ownership():
client.create_bucket(Bucket=bucket, ObjectOwnership=ownership)
response = client.get_bucket_ownership_controls(Bucket=bucket)
- response["OwnershipControls"]["Rules"][0]["ObjectOwnership"].should.equal(ownership)
+ assert response["OwnershipControls"]["Rules"][0]["ObjectOwnership"] == ownership
@mock_s3
@@ -29,7 +28,7 @@ def test_put_ownership_to_bucket():
)
response = client.get_bucket_ownership_controls(Bucket=bucket)
- response["OwnershipControls"]["Rules"][0]["ObjectOwnership"].should.equal(ownership)
+ assert response["OwnershipControls"]["Rules"][0]["ObjectOwnership"] == ownership
@mock_s3
@@ -44,7 +43,7 @@ def test_delete_ownership_from_bucket():
with pytest.raises(ClientError) as ex:
client.get_bucket_ownership_controls(Bucket=bucket)
- ex.value.response["Error"]["Code"].should.equal("OwnershipControlsNotFoundError")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "OwnershipControlsNotFoundError"
+ assert ex.value.response["Error"]["Message"] == (
"The bucket ownership controls were not found"
)
diff --git a/tests/test_s3/test_s3_replication.py b/tests/test_s3/test_s3_replication.py
index 3c162c1b8..0f70bca8c 100644
--- a/tests/test_s3/test_s3_replication.py
+++ b/tests/test_s3/test_s3_replication.py
@@ -1,70 +1,70 @@
-import boto3
-import pytest
-import sure # noqa # pylint: disable=unused-import
-
-from botocore.exceptions import ClientError
-from moto import mock_s3
from uuid import uuid4
+import boto3
+from botocore.exceptions import ClientError
+import pytest
+
+from moto import mock_s3
+
DEFAULT_REGION_NAME = "us-east-1"
@mock_s3
def test_get_bucket_replication_for_unexisting_bucket():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as exc:
- s3.get_bucket_replication(Bucket=bucket_name)
+ s3_client.get_bucket_replication(Bucket=bucket_name)
err = exc.value.response["Error"]
- err["Code"].should.equal("NoSuchBucket")
- err["Message"].should.equal("The specified bucket does not exist")
- err["BucketName"].should.equal(bucket_name)
+ assert err["Code"] == "NoSuchBucket"
+ assert err["Message"] == "The specified bucket does not exist"
+ assert err["BucketName"] == bucket_name
@mock_s3
def test_get_bucket_replication_bucket_without_replication():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
with pytest.raises(ClientError) as exc:
- s3.get_bucket_replication(Bucket=bucket_name)
+ s3_client.get_bucket_replication(Bucket=bucket_name)
err = exc.value.response["Error"]
- err["Code"].should.equal("ReplicationConfigurationNotFoundError")
- err["Message"].should.equal("The replication configuration was not found")
- err["BucketName"].should.equal(bucket_name)
+ assert err["Code"] == "ReplicationConfigurationNotFoundError"
+ assert err["Message"] == "The replication configuration was not found"
+ assert err["BucketName"] == bucket_name
@mock_s3
def test_delete_bucket_replication_unknown_bucket():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with pytest.raises(ClientError) as exc:
- s3.delete_bucket_replication(Bucket=bucket_name)
+ s3_client.delete_bucket_replication(Bucket=bucket_name)
err = exc.value.response["Error"]
- err["Code"].should.equal("NoSuchBucket")
- err["Message"].should.equal("The specified bucket does not exist")
- err["BucketName"].should.equal(bucket_name)
+ assert err["Code"] == "NoSuchBucket"
+ assert err["Message"] == "The specified bucket does not exist"
+ assert err["BucketName"] == bucket_name
@mock_s3
def test_delete_bucket_replication_bucket_without_replication():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
# No-op
- s3.delete_bucket_replication(Bucket=bucket_name)
+ s3_client.delete_bucket_replication(Bucket=bucket_name)
@mock_s3
def test_create_replication_without_versioning():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client.create_bucket(Bucket=bucket_name)
with pytest.raises(ClientError) as exc:
- s3.put_bucket_replication(
+ s3_client.put_bucket_replication(
Bucket=bucket_name,
ReplicationConfiguration={
"Role": "myrole",
@@ -74,23 +74,23 @@ def test_create_replication_without_versioning():
},
)
err = exc.value.response["Error"]
- err["Code"].should.equal("InvalidRequest")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidRequest"
+ assert err["Message"] == (
"Versioning must be 'Enabled' on the bucket to apply a replication configuration"
)
- err["BucketName"].should.equal(bucket_name)
+ assert err["BucketName"] == bucket_name
@mock_s3
def test_create_and_retrieve_replication_with_single_rules():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
- s3.put_bucket_replication(
+ s3_client.put_bucket_replication(
Bucket=bucket_name,
ReplicationConfiguration={
"Role": "myrole",
@@ -105,44 +105,44 @@ def test_create_and_retrieve_replication_with_single_rules():
},
)
- config = s3.get_bucket_replication(Bucket=bucket_name)["ReplicationConfiguration"]
- config.should.equal(
- {
- "Role": "myrole",
- "Rules": [
- {
- "DeleteMarkerReplication": {"Status": "Disabled"},
- "Destination": {"Bucket": "secondbucket"},
- "Filter": {"Prefix": ""},
- "ID": "firstrule",
- "Priority": 2,
- "Status": "Enabled",
- }
- ],
- }
- )
+ config = s3_client.get_bucket_replication(Bucket=bucket_name)[
+ "ReplicationConfiguration"
+ ]
+ assert config == {
+ "Role": "myrole",
+ "Rules": [
+ {
+ "DeleteMarkerReplication": {"Status": "Disabled"},
+ "Destination": {"Bucket": "secondbucket"},
+ "Filter": {"Prefix": ""},
+ "ID": "firstrule",
+ "Priority": 2,
+ "Status": "Enabled",
+ }
+ ],
+ }
- s3.delete_bucket_replication(Bucket=bucket_name)
+ s3_client.delete_bucket_replication(Bucket=bucket_name)
# Can't retrieve replication that has been deleted
with pytest.raises(ClientError) as exc:
- s3.get_bucket_replication(Bucket=bucket_name)
+ s3_client.get_bucket_replication(Bucket=bucket_name)
err = exc.value.response["Error"]
- err["Code"].should.equal("ReplicationConfigurationNotFoundError")
- err["Message"].should.equal("The replication configuration was not found")
- err["BucketName"].should.equal(bucket_name)
+ assert err["Code"] == "ReplicationConfigurationNotFoundError"
+ assert err["Message"] == "The replication configuration was not found"
+ assert err["BucketName"] == bucket_name
@mock_s3
def test_create_and_retrieve_replication_with_multiple_rules():
bucket_name = str(uuid4())
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_versioning(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_versioning(
Bucket=bucket_name, VersioningConfiguration={"Status": "Enabled"}
)
- s3.put_bucket_replication(
+ s3_client.put_bucket_replication(
Bucket=bucket_name,
ReplicationConfiguration={
"Role": "myrole",
@@ -158,19 +158,21 @@ def test_create_and_retrieve_replication_with_multiple_rules():
},
)
- config = s3.get_bucket_replication(Bucket=bucket_name)["ReplicationConfiguration"]
- config.should.have.key("Role").equal("myrole")
+ config = s3_client.get_bucket_replication(Bucket=bucket_name)[
+ "ReplicationConfiguration"
+ ]
+ assert config["Role"] == "myrole"
rules = config["Rules"]
- rules.should.have.length_of(2)
+ assert len(rules) == 2
first_rule = rules[0]
- first_rule.should.have.key("ID")
- first_rule.should.have.key("Priority").equal(1)
- first_rule.should.have.key("Status").equal("Enabled")
- first_rule.should.have.key("Destination").equal({"Bucket": "secondbucket"})
+ assert "ID" in first_rule
+ assert first_rule["Priority"] == 1
+ assert first_rule["Status"] == "Enabled"
+ assert first_rule["Destination"] == {"Bucket": "secondbucket"}
second = rules[1]
- second.should.have.key("ID").equal("secondrule")
- second.should.have.key("Priority").equal(2)
- second.should.have.key("Status").equal("Disabled")
- second.should.have.key("Destination").equal({"Bucket": "thirdbucket"})
+ assert second["ID"] == "secondrule"
+ assert second["Priority"] == 2
+ assert second["Status"] == "Disabled"
+ assert second["Destination"] == {"Bucket": "thirdbucket"}
diff --git a/tests/test_s3/test_s3_select.py b/tests/test_s3/test_s3_select.py
index 2a67de892..c304b8144 100644
--- a/tests/test_s3/test_s3_select.py
+++ b/tests/test_s3/test_s3_select.py
@@ -1,10 +1,12 @@
-import boto3
import json
-import pytest
-from moto import mock_s3
from unittest import TestCase
from uuid import uuid4
+import boto3
+import pytest
+
+from moto import mock_s3
+
SIMPLE_JSON = {"a1": "b1", "a2": "b2", "a3": None}
SIMPLE_JSON2 = {"a1": "b2", "a3": "b3"}
@@ -53,7 +55,7 @@ class TestS3Select(TestCase):
self.client.delete_bucket(Bucket=self.bucket_name)
def test_query_all(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="simple.json",
Expression="SELECT * FROM S3Object",
@@ -61,30 +63,26 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
- result.should.contain(
- {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}}
- )
+ result = list(content["Payload"])
+ assert {"Records": {"Payload": b'{"a1":"b1","a2":"b2","a3":null},'}} in result
# Verify result is valid JSON
json.loads(result[0]["Records"]["Payload"][0:-1].decode("utf-8"))
# Verify result contains metadata
- result.should.contain(
- {
- "Stats": {
- "Details": {
- "BytesScanned": 24,
- "BytesProcessed": 24,
- "BytesReturned": 22,
- }
+ assert {
+ "Stats": {
+ "Details": {
+ "BytesScanned": 24,
+ "BytesProcessed": 24,
+ "BytesReturned": 22,
}
}
- )
- result.should.contain({"End": {}})
+ } in result
+ assert {"End": {}} in result
def test_count_function(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="simple.json",
Expression="SELECT count(*) FROM S3Object",
@@ -92,12 +90,12 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
- result.should.contain({"Records": {"Payload": b'{"_1":1},'}})
+ result = list(content["Payload"])
+ assert {"Records": {"Payload": b'{"_1":1},'}} in result
@pytest.mark.xfail(message="Not yet implement in our parser")
def test_count_as(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="simple.json",
Expression="SELECT count(*) as cnt FROM S3Object",
@@ -105,12 +103,12 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
- result.should.contain({"Records": {"Payload": b'{"cnt":1},'}})
+ result = list(content["Payload"])
+ assert {"Records": {"Payload": b'{"cnt":1},'}} in result
@pytest.mark.xfail(message="Not yet implement in our parser")
def test_count_list_as(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="list.json",
Expression="SELECT count(*) as cnt FROM S3Object",
@@ -118,11 +116,11 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
- result.should.contain({"Records": {"Payload": b'{"cnt":1},'}})
+ result = list(content["Payload"])
+ assert {"Records": {"Payload": b'{"cnt":1},'}} in result
def test_count_csv(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="simple_csv",
Expression="SELECT count(*) FROM S3Object",
@@ -132,11 +130,11 @@ class TestS3Select(TestCase):
},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
- result.should.contain({"Records": {"Payload": b'{"_1":3},'}})
+ result = list(content["Payload"])
+ assert {"Records": {"Payload": b'{"_1":3},'}} in result
def test_extensive_json__select_list(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="extensive.json",
Expression="select * from s3object[*].staff[*] s",
@@ -144,11 +142,11 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
+ result = list(content["Payload"])
assert {"Records": {"Payload": b"{},"}} in result
def test_extensive_json__select_all(self):
- x = self.client.select_object_content(
+ content = self.client.select_object_content(
Bucket=self.bucket_name,
Key="extensive.json",
Expression="select * from s3object s",
@@ -156,7 +154,7 @@ class TestS3Select(TestCase):
InputSerialization={"JSON": {"Type": "DOCUMENT"}},
OutputSerialization={"JSON": {"RecordDelimiter": ","}},
)
- result = list(x["Payload"])
+ result = list(content["Payload"])
assert {
"Records": {
"Payload": b'{"_1":[{"staff":[{"name":"Janelyn M","city":"Chicago","kids":2},{"name":"Stacy P","city":"Seattle","kids":1}],"country":"USA"}]},'
diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py
index c0fa60cd6..c2f40ffd2 100644
--- a/tests/test_s3/test_s3_storageclass.py
+++ b/tests/test_s3/test_s3_storageclass.py
@@ -1,6 +1,4 @@
import boto3
-
-import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
import pytest
@@ -9,93 +7,93 @@ from moto import mock_s3
@mock_s3
def test_s3_storage_class_standard():
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="Bucket")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket="Bucket")
# add an object to the bucket with standard storage
- s3.put_object(Bucket="Bucket", Key="my_key", Body="my_value")
+ s3_client.put_object(Bucket="Bucket", Key="my_key", Body="my_value")
- list_of_objects = s3.list_objects(Bucket="Bucket")
+ list_of_objects = s3_client.list_objects(Bucket="Bucket")
- list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
+ assert list_of_objects["Contents"][0]["StorageClass"] == "STANDARD"
@mock_s3
def test_s3_storage_class_infrequent_access():
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
)
# add an object to the bucket with standard storage
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket",
Key="my_key_infrequent",
Body="my_value_infrequent",
StorageClass="STANDARD_IA",
)
- D = s3.list_objects(Bucket="Bucket")
+ objs = s3_client.list_objects(Bucket="Bucket")
- D["Contents"][0]["StorageClass"].should.equal("STANDARD_IA")
+ assert objs["Contents"][0]["StorageClass"] == "STANDARD_IA"
@mock_s3
def test_s3_storage_class_intelligent_tiering():
- s3 = boto3.client("s3")
+ s3_client = boto3.client("s3")
- s3.create_bucket(
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
)
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket",
Key="my_key_infrequent",
Body="my_value_infrequent",
StorageClass="INTELLIGENT_TIERING",
)
- objects = s3.list_objects(Bucket="Bucket")
+ objects = s3_client.list_objects(Bucket="Bucket")
- objects["Contents"][0]["StorageClass"].should.equal("INTELLIGENT_TIERING")
+ assert objects["Contents"][0]["StorageClass"] == "INTELLIGENT_TIERING"
@mock_s3
def test_s3_storage_class_copy():
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="Bucket")
- s3.put_object(
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket="Bucket")
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
)
- s3.create_bucket(Bucket="Bucket2")
+ s3_client.create_bucket(Bucket="Bucket2")
# second object is originally of storage class REDUCED_REDUNDANCY
- s3.put_object(Bucket="Bucket2", Key="Second_Object", Body="Body2")
+ s3_client.put_object(Bucket="Bucket2", Key="Second_Object", Body="Body2")
- s3.copy_object(
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket2",
Key="Second_Object",
StorageClass="ONEZONE_IA",
)
- list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
+ list_of_copied_objects = s3_client.list_objects(Bucket="Bucket2")
# checks that a copied object can be properly copied
- list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("ONEZONE_IA")
+ assert list_of_copied_objects["Contents"][0]["StorageClass"] == "ONEZONE_IA"
@mock_s3
def test_s3_invalid_copied_storage_class():
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="Bucket")
- s3.put_object(
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket="Bucket")
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
)
- s3.create_bucket(Bucket="Bucket2")
- s3.put_object(
+ s3_client.create_bucket(Bucket="Bucket2")
+ s3_client.put_object(
Bucket="Bucket2",
Key="Second_Object",
Body="Body2",
@@ -104,175 +102,179 @@ def test_s3_invalid_copied_storage_class():
# Try to copy an object with an invalid storage class
with pytest.raises(ClientError) as err:
- s3.copy_object(
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket2",
Key="Second_Object",
StorageClass="STANDARD2",
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidStorageClass")
- e.response["Error"]["Message"].should.equal(
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidStorageClass"
+ assert err_value.response["Error"]["Message"] == (
"The storage class you specified is not valid"
)
@mock_s3
def test_s3_invalid_storage_class():
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Try to add an object with an invalid storage class
with pytest.raises(ClientError) as err:
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARDD"
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidStorageClass")
- e.response["Error"]["Message"].should.equal(
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidStorageClass"
+ assert err_value.response["Error"]["Message"] == (
"The storage class you specified is not valid"
)
@mock_s3
def test_s3_default_storage_class():
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
- s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
+ s3_client.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
- list_of_objects = s3.list_objects(Bucket="Bucket")
+ list_of_objects = s3_client.list_objects(Bucket="Bucket")
# tests that the default storage class is still STANDARD
- list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
+ assert list_of_objects["Contents"][0]["StorageClass"] == "STANDARD"
@mock_s3
def test_s3_copy_object_error_for_glacier_storage_class_not_restored():
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
)
- with pytest.raises(ClientError) as ex:
- s3.copy_object(
+ with pytest.raises(ClientError) as exc:
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket",
Key="Second_Object",
)
- ex.value.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")
+ assert exc.value.response["Error"]["Code"] == "ObjectNotInActiveTierError"
@mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class_not_restored():
- s3 = boto3.client("s3")
- s3.create_bucket(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"
)
with pytest.raises(ClientError) as exc:
- s3.copy_object(
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket",
Key="Second_Object",
)
- exc.value.response["Error"]["Code"].should.equal("ObjectNotInActiveTierError")
+ assert exc.value.response["Error"]["Code"] == "ObjectNotInActiveTierError"
@mock_s3
def test_s3_copy_object_for_glacier_storage_class_restored():
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="Bucket")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket="Bucket")
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
)
- s3.create_bucket(Bucket="Bucket2")
- s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123})
+ s3_client.create_bucket(Bucket="Bucket2")
+ s3_client.restore_object(
+ Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
+ )
- s3.copy_object(
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket2",
Key="Second_Object",
)
- list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
+ list_of_copied_objects = s3_client.list_objects(Bucket="Bucket2")
# checks that copy of restored Glacier object has STANDARD storage class
- list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
+ assert list_of_copied_objects["Contents"][0]["StorageClass"] == "STANDARD"
# checks that metadata of copy has no Restore property
- s3.head_object(Bucket="Bucket2", Key="Second_Object").should.not_have.property(
- "Restore"
+    assert "Restore" not in (
+        s3_client.head_object(Bucket="Bucket2", Key="Second_Object")
)
@mock_s3
def test_s3_copy_object_for_deep_archive_storage_class_restored():
- s3 = boto3.client("s3", region_name="us-east-1")
- s3.create_bucket(Bucket="Bucket")
+ s3_client = boto3.client("s3", region_name="us-east-1")
+ s3_client.create_bucket(Bucket="Bucket")
- s3.put_object(
+ s3_client.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"
)
with pytest.raises(ClientError) as exc:
- s3.get_object(Bucket="Bucket", Key="First_Object")
+ s3_client.get_object(Bucket="Bucket", Key="First_Object")
err = exc.value.response["Error"]
- err["Code"].should.equal("InvalidObjectState")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidObjectState"
+ assert err["Message"] == (
"The operation is not valid for the object's storage class"
)
- err["StorageClass"].should.equal("DEEP_ARCHIVE")
+ assert err["StorageClass"] == "DEEP_ARCHIVE"
- s3.create_bucket(Bucket="Bucket2")
- s3.restore_object(Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123})
- s3.get_object(Bucket="Bucket", Key="First_Object")
+ s3_client.create_bucket(Bucket="Bucket2")
+ s3_client.restore_object(
+ Bucket="Bucket", Key="First_Object", RestoreRequest={"Days": 123}
+ )
+ s3_client.get_object(Bucket="Bucket", Key="First_Object")
- s3.copy_object(
+ s3_client.copy_object(
CopySource={"Bucket": "Bucket", "Key": "First_Object"},
Bucket="Bucket2",
Key="Second_Object",
)
- list_of_copied_objects = s3.list_objects(Bucket="Bucket2")
+ list_of_copied_objects = s3_client.list_objects(Bucket="Bucket2")
# checks that copy of restored Glacier object has STANDARD storage class
- list_of_copied_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
+ assert list_of_copied_objects["Contents"][0]["StorageClass"] == "STANDARD"
# checks that metadata of copy has no Restore property
- s3.head_object(Bucket="Bucket2", Key="Second_Object").should.not_have.property(
- "Restore"
+    assert "Restore" not in (
+        s3_client.head_object(Bucket="Bucket2", Key="Second_Object")
)
@mock_s3
def test_s3_get_object_from_glacier():
- s3 = boto3.client("s3", region_name="us-east-1")
+ s3_client = boto3.client("s3", region_name="us-east-1")
bucket_name = "tests3getobjectfromglacier"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- s3.put_object(
+ s3_client.put_object(
Bucket=bucket_name, Key="test.txt", Body="contents", StorageClass="GLACIER"
)
with pytest.raises(ClientError) as exc:
- s3.get_object(Bucket=bucket_name, Key="test.txt")
+ s3_client.get_object(Bucket=bucket_name, Key="test.txt")
err = exc.value.response["Error"]
- err["Code"].should.equal("InvalidObjectState")
- err["Message"].should.equal(
+ assert err["Code"] == "InvalidObjectState"
+ assert err["Message"] == (
"The operation is not valid for the object's storage class"
)
- err["StorageClass"].should.equal("GLACIER")
+ assert err["StorageClass"] == "GLACIER"
diff --git a/tests/test_s3/test_s3_tagging.py b/tests/test_s3/test_s3_tagging.py
index dc85e41b1..8abd6a9da 100644
--- a/tests/test_s3/test_s3_tagging.py
+++ b/tests/test_s3/test_s3_tagging.py
@@ -1,13 +1,10 @@
import boto3
-import requests
-import pytest
-import sure # noqa # pylint: disable=unused-import
-
from botocore.client import ClientError
-from moto.s3.responses import DEFAULT_REGION_NAME
-
+import pytest
+import requests
from moto import mock_s3
+from moto.s3.responses import DEFAULT_REGION_NAME
@mock_s3
@@ -16,54 +13,56 @@ def test_get_bucket_tagging_unknown_bucket():
with pytest.raises(ClientError) as ex:
client.get_bucket_tagging(Bucket="foobar")
- ex.value.response["Error"]["Code"].should.equal("NoSuchBucket")
- ex.value.response["Error"]["Message"].should.equal(
+ assert ex.value.response["Error"]["Code"] == "NoSuchBucket"
+ assert ex.value.response["Error"]["Message"] == (
"The specified bucket does not exist"
)
@mock_s3
def test_put_object_with_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
# using system tags will fail
with pytest.raises(ClientError) as err:
- s3.put_object(Bucket=bucket_name, Key=key, Body="test", Tagging="aws:foo=bar")
+ s3_client.put_object(
+ Bucket=bucket_name, Key=key, Body="test", Tagging="aws:foo=bar"
+ )
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidTag")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidTag"
- s3.put_object(Bucket=bucket_name, Key=key, Body="test", Tagging="foo=bar")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test", Tagging="foo=bar")
- s3.get_object_tagging(Bucket=bucket_name, Key=key)["TagSet"].should.contain(
- {"Key": "foo", "Value": "bar"}
- )
+ assert {"Key": "foo", "Value": "bar"} in s3_client.get_object_tagging(
+ Bucket=bucket_name, Key=key
+ )["TagSet"]
- resp = s3.get_object(Bucket=bucket_name, Key=key)
- resp.should.have.key("TagCount").equals(1)
+ resp = s3_client.get_object(Bucket=bucket_name, Key=key)
+ assert resp["TagCount"] == 1
- s3.delete_object_tagging(Bucket=bucket_name, Key=key)
+ s3_client.delete_object_tagging(Bucket=bucket_name, Key=key)
- s3.get_object_tagging(Bucket=bucket_name, Key=key)["TagSet"].should.equal([])
+ assert s3_client.get_object_tagging(Bucket=bucket_name, Key=key)["TagSet"] == []
@mock_s3
def test_put_bucket_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
# With 1 tag:
- resp = s3.put_bucket_tagging(
+ resp = s3_client.put_bucket_tagging(
Bucket=bucket_name, Tagging={"TagSet": [{"Key": "TagOne", "Value": "ValueOne"}]}
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
# With multiple tags:
- resp = s3.put_bucket_tagging(
+ resp = s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={
"TagSet": [
@@ -74,15 +73,15 @@ def test_put_bucket_tagging():
},
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
# No tags is also OK:
- resp = s3.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": []})
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ resp = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": []})
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
# With duplicate tag keys:
with pytest.raises(ClientError) as err:
- s3.put_bucket_tagging(
+ s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={
"TagSet": [
@@ -91,26 +90,26 @@ def test_put_bucket_tagging():
]
},
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidTag")
- e.response["Error"]["Message"].should.equal(
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidTag"
+ assert err_value.response["Error"]["Message"] == (
"Cannot provide multiple Tags with the same key"
)
# Cannot put tags that are "system" tags - i.e. tags that start with "aws:"
- with pytest.raises(ClientError) as ce:
- s3.put_bucket_tagging(
+ with pytest.raises(ClientError) as ce_exc:
+ s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={"TagSet": [{"Key": "aws:sometag", "Value": "nope"}]},
)
- e = ce.value
- e.response["Error"]["Code"].should.equal("InvalidTag")
- e.response["Error"]["Message"].should.equal(
+ err_value = ce_exc.value
+ assert err_value.response["Error"]["Code"] == "InvalidTag"
+ assert err_value.response["Error"]["Message"] == (
"System tags cannot be added/updated by requester"
)
# This is OK though:
- s3.put_bucket_tagging(
+ s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={"TagSet": [{"Key": "something:aws:stuff", "Value": "this is fine"}]},
)
@@ -118,10 +117,10 @@ def test_put_bucket_tagging():
@mock_s3
def test_get_bucket_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
- s3.put_bucket_tagging(
+ s3_client.create_bucket(Bucket=bucket_name)
+ s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={
"TagSet": [
@@ -132,28 +131,28 @@ def test_get_bucket_tagging():
)
# Get the tags for the bucket:
- resp = s3.get_bucket_tagging(Bucket=bucket_name)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- len(resp["TagSet"]).should.equal(2)
+ resp = s3_client.get_bucket_tagging(Bucket=bucket_name)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
+ assert len(resp["TagSet"]) == 2
# With no tags:
- s3.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": []})
+ s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": []})
with pytest.raises(ClientError) as err:
- s3.get_bucket_tagging(Bucket=bucket_name)
+ s3_client.get_bucket_tagging(Bucket=bucket_name)
- e = err.value
- e.response["Error"]["Code"].should.equal("NoSuchTagSet")
- e.response["Error"]["Message"].should.equal("The TagSet does not exist")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "NoSuchTagSet"
+ assert err_value.response["Error"]["Message"] == "The TagSet does not exist"
@mock_s3
def test_delete_bucket_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- s3.put_bucket_tagging(
+ s3_client.put_bucket_tagging(
Bucket=bucket_name,
Tagging={
"TagSet": [
@@ -163,26 +162,26 @@ def test_delete_bucket_tagging():
},
)
- resp = s3.delete_bucket_tagging(Bucket=bucket_name)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(204)
+ resp = s3_client.delete_bucket_tagging(Bucket=bucket_name)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204
with pytest.raises(ClientError) as err:
- s3.get_bucket_tagging(Bucket=bucket_name)
+ s3_client.get_bucket_tagging(Bucket=bucket_name)
- e = err.value
- e.response["Error"]["Code"].should.equal("NoSuchTagSet")
- e.response["Error"]["Message"].should.equal("The TagSet does not exist")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "NoSuchTagSet"
+ assert err_value.response["Error"]["Message"] == "The TagSet does not exist"
@mock_s3
def test_put_object_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
with pytest.raises(ClientError) as err:
- s3.put_object_tagging(
+ s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -193,30 +192,28 @@ def test_put_object_tagging():
},
)
- e = err.value
- e.response["Error"].should.equal(
- {
- "Code": "NoSuchKey",
- "Message": "The specified key does not exist.",
- "Key": "key-with-tags",
- "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
- }
- )
+ err_value = err.value
+ assert err_value.response["Error"] == {
+ "Code": "NoSuchKey",
+ "Message": "The specified key does not exist.",
+ "Key": "key-with-tags",
+ "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
+ }
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
# using system tags will fail
with pytest.raises(ClientError) as err:
- s3.put_object_tagging(
+ s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={"TagSet": [{"Key": "aws:item1", "Value": "foo"}]},
)
- e = err.value
- e.response["Error"]["Code"].should.equal("InvalidTag")
+ err_value = err.value
+ assert err_value.response["Error"]["Code"] == "InvalidTag"
- resp = s3.put_object_tagging(
+ resp = s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -228,22 +225,22 @@ def test_put_object_tagging():
},
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
@mock_s3
def test_put_object_tagging_on_earliest_version():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
s3_resource = boto3.resource("s3")
bucket_versioning = s3_resource.BucketVersioning(bucket_name)
bucket_versioning.enable()
- bucket_versioning.status.should.equal("Enabled")
+ assert bucket_versioning.status == "Enabled"
with pytest.raises(ClientError) as err:
- s3.put_object_tagging(
+ s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -254,24 +251,22 @@ def test_put_object_tagging_on_earliest_version():
},
)
- e = err.value
- e.response["Error"].should.equal(
- {
- "Code": "NoSuchKey",
- "Message": "The specified key does not exist.",
- "Key": "key-with-tags",
- "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
- }
- )
+ err_value = err.value
+ assert err_value.response["Error"] == {
+ "Code": "NoSuchKey",
+ "Message": "The specified key does not exist.",
+ "Key": "key-with-tags",
+ "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
+ }
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
- s3.put_object(Bucket=bucket_name, Key=key, Body="test_updated")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test_updated")
object_versions = list(s3_resource.Bucket(bucket_name).object_versions.all())
first_object = object_versions[0]
second_object = object_versions[1]
- resp = s3.put_object_tagging(
+ resp = s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -283,38 +278,41 @@ def test_put_object_tagging_on_earliest_version():
VersionId=first_object.id,
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
# Older version has tags while the most recent does not
- resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
- resp["VersionId"].should.equal(first_object.id)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
- sorted_tagset.should.equal(
- [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
+ resp = s3_client.get_object_tagging(
+ Bucket=bucket_name, Key=key, VersionId=first_object.id
)
+ assert resp["VersionId"] == first_object.id
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
+ sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
+ assert sorted_tagset == [
+ {"Key": "item1", "Value": "foo"},
+ {"Key": "item2", "Value": "bar"},
+ ]
- resp = s3.get_object_tagging(
+ resp = s3_client.get_object_tagging(
Bucket=bucket_name, Key=key, VersionId=second_object.id
)
- resp["VersionId"].should.equal(second_object.id)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- resp["TagSet"].should.equal([])
+ assert resp["VersionId"] == second_object.id
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
+ assert resp["TagSet"] == []
@mock_s3
def test_put_object_tagging_on_both_version():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
s3_resource = boto3.resource("s3")
bucket_versioning = s3_resource.BucketVersioning(bucket_name)
bucket_versioning.enable()
- bucket_versioning.status.should.equal("Enabled")
+ assert bucket_versioning.status == "Enabled"
with pytest.raises(ClientError) as err:
- s3.put_object_tagging(
+ s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -325,24 +323,22 @@ def test_put_object_tagging_on_both_version():
},
)
- e = err.value
- e.response["Error"].should.equal(
- {
- "Code": "NoSuchKey",
- "Message": "The specified key does not exist.",
- "Key": "key-with-tags",
- "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
- }
- )
+ err_value = err.value
+ assert err_value.response["Error"] == {
+ "Code": "NoSuchKey",
+ "Message": "The specified key does not exist.",
+ "Key": "key-with-tags",
+ "RequestID": "7a62c49f-347e-4fc4-9331-6e8eEXAMPLE",
+ }
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
- s3.put_object(Bucket=bucket_name, Key=key, Body="test_updated")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test_updated")
object_versions = list(s3_resource.Bucket(bucket_name).object_versions.all())
first_object = object_versions[0]
second_object = object_versions[1]
- resp = s3.put_object_tagging(
+ resp = s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -353,9 +349,9 @@ def test_put_object_tagging_on_both_version():
},
VersionId=first_object.id,
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
- resp = s3.put_object_tagging(
+ resp = s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -366,56 +362,60 @@ def test_put_object_tagging_on_both_version():
},
VersionId=second_object.id,
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
- resp = s3.get_object_tagging(Bucket=bucket_name, Key=key, VersionId=first_object.id)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
- sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
- sorted_tagset.should.equal(
- [{"Key": "item1", "Value": "foo"}, {"Key": "item2", "Value": "bar"}]
+ resp = s3_client.get_object_tagging(
+ Bucket=bucket_name, Key=key, VersionId=first_object.id
)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
+ sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
+ assert sorted_tagset == [
+ {"Key": "item1", "Value": "foo"},
+ {"Key": "item2", "Value": "bar"},
+ ]
- resp = s3.get_object_tagging(
+ resp = s3_client.get_object_tagging(
Bucket=bucket_name, Key=key, VersionId=second_object.id
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
sorted_tagset = sorted(resp["TagSet"], key=lambda t: t["Key"])
- sorted_tagset.should.equal(
- [{"Key": "item1", "Value": "baz"}, {"Key": "item2", "Value": "bin"}]
- )
+ assert sorted_tagset == [
+ {"Key": "item1", "Value": "baz"},
+ {"Key": "item2", "Value": "bin"},
+ ]
@mock_s3
def test_put_object_tagging_with_single_tag():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
- resp = s3.put_object_tagging(
+ resp = s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={"TagSet": [{"Key": "item1", "Value": "foo"}]},
)
- resp["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
+ assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200
@mock_s3
def test_get_object_tagging():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
- s3.create_bucket(Bucket=bucket_name)
+ s3_client.create_bucket(Bucket=bucket_name)
- s3.put_object(Bucket=bucket_name, Key=key, Body="test")
+ s3_client.put_object(Bucket=bucket_name, Key=key, Body="test")
- resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
- resp["TagSet"].should.have.length_of(0)
+ resp = s3_client.get_object_tagging(Bucket=bucket_name, Key=key)
+ assert not resp["TagSet"]
- s3.put_object_tagging(
+ s3_client.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={
@@ -425,34 +425,34 @@ def test_get_object_tagging():
]
},
)
- resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
+ resp = s3_client.get_object_tagging(Bucket=bucket_name, Key=key)
- resp["TagSet"].should.have.length_of(2)
- resp["TagSet"].should.contain({"Key": "item1", "Value": "foo"})
- resp["TagSet"].should.contain({"Key": "item2", "Value": "bar"})
+ assert len(resp["TagSet"]) == 2
+ assert {"Key": "item1", "Value": "foo"} in resp["TagSet"]
+ assert {"Key": "item2", "Value": "bar"} in resp["TagSet"]
@mock_s3
def test_objects_tagging_with_same_key_name():
- s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
+ s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
key_name = "file.txt"
bucket1 = "bucket-1"
- s3.create_bucket(Bucket=bucket1)
+ s3_client.create_bucket(Bucket=bucket1)
tagging = "variable=one"
- s3.put_object(Bucket=bucket1, Body=b"test", Key=key_name, Tagging=tagging)
+ s3_client.put_object(Bucket=bucket1, Body=b"test", Key=key_name, Tagging=tagging)
bucket2 = "bucket-2"
- s3.create_bucket(Bucket=bucket2)
+ s3_client.create_bucket(Bucket=bucket2)
tagging2 = "variable=two"
- s3.put_object(Bucket=bucket2, Body=b"test", Key=key_name, Tagging=tagging2)
+ s3_client.put_object(Bucket=bucket2, Body=b"test", Key=key_name, Tagging=tagging2)
- variable1 = s3.get_object_tagging(Bucket=bucket1, Key=key_name)["TagSet"][0][
+ variable1 = s3_client.get_object_tagging(Bucket=bucket1, Key=key_name)["TagSet"][0][
"Value"
]
- variable2 = s3.get_object_tagging(Bucket=bucket2, Key=key_name)["TagSet"][0][
+ variable2 = s3_client.get_object_tagging(Bucket=bucket2, Key=key_name)["TagSet"][0][
"Value"
]
@@ -462,14 +462,14 @@ def test_objects_tagging_with_same_key_name():
@mock_s3
def test_generate_url_for_tagged_object():
- s3 = boto3.client("s3")
- s3.create_bucket(Bucket="my-bucket")
- s3.put_object(
+ s3_client = boto3.client("s3")
+ s3_client.create_bucket(Bucket="my-bucket")
+ s3_client.put_object(
Bucket="my-bucket", Key="test.txt", Body=b"abc", Tagging="MyTag=value"
)
- url = s3.generate_presigned_url(
+ url = s3_client.generate_presigned_url(
"get_object", Params={"Bucket": "my-bucket", "Key": "test.txt"}
)
response = requests.get(url)
- response.content.should.equal(b"abc")
- response.headers["x-amz-tagging-count"].should.equal("1")
+ assert response.content == b"abc"
+ assert response.headers["x-amz-tagging-count"] == "1"
diff --git a/tests/test_s3/test_s3_utils.py b/tests/test_s3/test_s3_utils.py
index 529977cb9..55ccf7cad 100644
--- a/tests/test_s3/test_s3_utils.py
+++ b/tests/test_s3/test_s3_utils.py
@@ -1,5 +1,6 @@
+from unittest.mock import patch
+
import pytest
-from sure import expect
from moto.s3.utils import (
bucket_name_from_url,
_VersionedKeyStore,
@@ -9,66 +10,63 @@ from moto.s3.utils import (
compute_checksum,
cors_matches_origin,
)
-from unittest.mock import patch
def test_base_url():
- expect(bucket_name_from_url("https://s3.amazonaws.com/")).should.equal(None)
+ assert bucket_name_from_url("https://s3.amazonaws.com/") is None
def test_localhost_bucket():
- expect(bucket_name_from_url("https://wfoobar.localhost:5000/abc")).should.equal(
- "wfoobar"
- )
+ assert bucket_name_from_url("https://wfoobar.localhost:5000/abc") == "wfoobar"
def test_localhost_without_bucket():
- expect(bucket_name_from_url("https://www.localhost:5000/def")).should.equal(None)
+ assert bucket_name_from_url("https://www.localhost:5000/def") is None
def test_force_ignore_subdomain_for_bucketnames():
with patch("moto.s3.utils.S3_IGNORE_SUBDOMAIN_BUCKETNAME", True):
- expect(
+ assert (
bucket_name_from_url("https://subdomain.localhost:5000/abc/resource")
- ).should.equal(None)
+ is None
+ )
def test_versioned_key_store():
- d = _VersionedKeyStore()
+ key_store = _VersionedKeyStore()
- d.should.have.length_of(0)
+ assert not key_store
- d["key"] = [1]
+ key_store["key"] = [1]
+ assert len(key_store) == 1
- d.should.have.length_of(1)
+ key_store["key"] = 2
+ assert len(key_store) == 1
- d["key"] = 2
- d.should.have.length_of(1)
-
- d.should.have.key("key").being.equal(2)
-
- d.get.when.called_with("key").should.return_value(2)
- d.get.when.called_with("badkey").should.return_value(None)
- d.get.when.called_with("badkey", "HELLO").should.return_value("HELLO")
+ assert key_store["key"] == 2
+ assert key_store.get("key") == 2
+ assert key_store.get("badkey") is None
+ assert key_store.get("badkey", "HELLO") == "HELLO"
# Tests key[
- d.shouldnt.have.key("badkey")
- d.__getitem__.when.called_with("badkey").should.throw(KeyError)
+ assert "badkey" not in key_store
+ with pytest.raises(KeyError):
+ _ = key_store["badkey"]
- d.getlist("key").should.have.length_of(2)
- d.getlist("key").should.be.equal([[1], 2])
- d.getlist("badkey").should.be.none
+ assert len(key_store.getlist("key")) == 2
+ assert key_store.getlist("key") == [[1], 2]
+ assert key_store.getlist("badkey") is None
- d.setlist("key", 1)
- d.getlist("key").should.be.equal([1])
+ key_store.setlist("key", 1)
+ assert key_store.getlist("key") == [1]
- d.setlist("key", (1, 2))
- d.getlist("key").shouldnt.be.equal((1, 2))
- d.getlist("key").should.be.equal([1, 2])
+ key_store.setlist("key", (1, 2))
+ assert key_store.getlist("key") != (1, 2)
+ assert key_store.getlist("key") == [1, 2]
- d.setlist("key", [[1], [2]])
- d["key"].should.have.length_of(1)
- d.getlist("key").should.be.equal([[1], [2]])
+ key_store.setlist("key", [[1], [2]])
+ assert len(key_store["key"]) == 1
+ assert key_store.getlist("key") == [[1], [2]]
def test_parse_region_from_url():
@@ -81,7 +79,7 @@ def test_parse_region_from_url():
"https://s3.us-west-2.amazonaws.com/bucket",
"https://bucket.s3-us-west-2.amazonaws.com",
]:
- parse_region_from_url(url).should.equal(expected)
+ assert parse_region_from_url(url) == expected
expected = "us-east-1"
for url in [
@@ -90,7 +88,7 @@ def test_parse_region_from_url():
"https://s3.amazonaws.com/bucket",
"https://bucket.s3.amazonaws.com",
]:
- parse_region_from_url(url).should.equal(expected)
+ assert parse_region_from_url(url) == expected
@pytest.mark.parametrize(
@@ -105,7 +103,7 @@ def test_parse_region_from_url():
],
)
def test_clean_key_name(key, expected):
- clean_key_name(key).should.equal(expected)
+ assert clean_key_name(key) == expected
@pytest.mark.parametrize(
@@ -120,32 +118,32 @@ def test_clean_key_name(key, expected):
],
)
def test_undo_clean_key_name(key, expected):
- undo_clean_key_name(key).should.equal(expected)
+ assert undo_clean_key_name(key) == expected
def test_checksum_sha256():
checksum = b"h9FJy0JMA4dlbyEdJYn7Wx4WIpkhMJ6YWIQZzMqKc2I="
- compute_checksum(b"somedata", "SHA256").should.equal(checksum)
+ assert compute_checksum(b"somedata", "SHA256") == checksum
# Unknown algorithms fallback to SHA256 for now
- compute_checksum(b"somedata", algorithm="unknown").should.equal(checksum)
+ assert compute_checksum(b"somedata", algorithm="unknown") == checksum
def test_checksum_sha1():
- compute_checksum(b"somedata", "SHA1").should.equal(b"76oxGuRIpzdMEiBhv+2VLZQOnjc=")
+ assert compute_checksum(b"somedata", "SHA1") == b"76oxGuRIpzdMEiBhv+2VLZQOnjc="
def test_checksum_crc32():
- compute_checksum(b"somedata", "CRC32").should.equal(b"Uwy90A==")
+ assert compute_checksum(b"somedata", "CRC32") == b"Uwy90A=="
def test_checksum_crc32c():
try:
import crc32c # noqa # pylint: disable=unused-import
- compute_checksum(b"somedata", "CRC32C").should.equal(b"dB9qBQ==")
+ assert compute_checksum(b"somedata", "CRC32C") == b"dB9qBQ=="
except: # noqa: E722 Do not use bare except
# Optional library Can't be found - just revert to CRC32
- compute_checksum(b"somedata", "CRC32C").should.equal(b"Uwy90A==")
+ assert compute_checksum(b"somedata", "CRC32C") == b"Uwy90A=="
def test_cors_utils():
diff --git a/tests/test_s3/test_server.py b/tests/test_s3/test_server.py
index 8017b86f2..6e48a9e6e 100644
--- a/tests/test_s3/test_server.py
+++ b/tests/test_s3/test_server.py
@@ -1,18 +1,15 @@
+"""Test different server responses."""
import io
+from unittest.mock import patch
from urllib.parse import urlparse, parse_qs
-import sure # noqa # pylint: disable=unused-import
-import requests
-import pytest
import xmltodict
from flask.testing import FlaskClient
+import pytest
+import requests
+
import moto.server as server
from moto.moto_server.threaded_moto_server import ThreadedMotoServer
-from unittest.mock import patch
-
-"""
-Test the different server responses
-"""
class AuthenticatedClient(FlaskClient):
@@ -33,7 +30,7 @@ def test_s3_server_get():
test_client = authenticated_client()
res = test_client.get("/")
- res.data.should.contain(b"ListAllMyBucketsResult")
+ assert b"ListAllMyBucketsResult" in res.data
@pytest.mark.parametrize("key_name", ["bar_baz", "bar+baz", "baz bar"])
@@ -41,31 +38,31 @@ def test_s3_server_bucket_create(key_name):
test_client = authenticated_client()
res = test_client.put("/", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
+ assert res.status_code == 200
res = test_client.get("/")
- res.data.should.contain(b"foobaz")
+ assert b"foobaz" in res.data
res = test_client.get("/", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
- res.data.should.contain(b"ListBucketResult")
+ assert res.status_code == 200
+ assert b"ListBucketResult" in res.data
res = test_client.put(
f"/{key_name}", "http://foobaz.localhost:5000/", data="test value"
)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
assert "ETag" in dict(res.headers)
# ListBuckets
res = test_client.get(
"/", "http://foobaz.localhost:5000/", query_string={"prefix": key_name}
)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
content = xmltodict.parse(res.data)["ListBucketResult"]["Contents"]
# If we receive a dict, we only received one result
# If content is of type list, our call returned multiple results - which is not correct
- content.should.be.a(dict)
- content["Key"].should.equal(key_name)
+ assert isinstance(content, dict)
+ assert content["Key"] == key_name
# GetBucket
res = test_client.head("http://foobaz.localhost:5000")
@@ -74,13 +71,13 @@ def test_s3_server_bucket_create(key_name):
# HeadObject
res = test_client.head(f"/{key_name}", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
+ assert res.status_code == 200
assert res.headers.get("Accept-Ranges") == "bytes"
# GetObject
res = test_client.get(f"/{key_name}", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
- res.data.should.equal(b"test value")
+ assert res.status_code == 200
+ assert res.data == b"test value"
assert res.headers.get("Accept-Ranges") == "bytes"
@@ -89,27 +86,27 @@ def test_s3_server_ignore_subdomain_for_bucketnames():
test_client = authenticated_client()
res = test_client.put("/mybucket", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
- res.data.should.contain(b"mybucket")
+ assert res.status_code == 200
+ assert b"mybucket" in res.data
def test_s3_server_bucket_versioning():
test_client = authenticated_client()
res = test_client.put("/", "http://foobaz.localhost:5000/")
- res.status_code.should.equal(200)
+ assert res.status_code == 200
# Just enough XML to enable versioning
body = "<Status>Enabled</Status>"
res = test_client.put("/?versioning", "http://foobaz.localhost:5000", data=body)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
def test_s3_server_post_to_bucket():
test_client = authenticated_client()
res = test_client.put("/", "http://tester.localhost:5000/")
- res.status_code.should.equal(200)
+ assert res.status_code == 200
test_client.post(
"/",
@@ -118,15 +115,15 @@ def test_s3_server_post_to_bucket():
)
res = test_client.get("/the-key", "http://tester.localhost:5000/")
- res.status_code.should.equal(200)
- res.data.should.equal(b"nothing")
+ assert res.status_code == 200
+ assert res.data == b"nothing"
def test_s3_server_post_to_bucket_redirect():
test_client = authenticated_client()
res = test_client.put("/", "http://tester.localhost:5000/")
- res.status_code.should.equal(200)
+ assert res.status_code == 200
redirect_base = "https://redirect.com/success/"
filecontent = "nothing"
@@ -141,7 +138,7 @@ def test_s3_server_post_to_bucket_redirect():
},
)
real_key = f"asdf/the-key/{filename}"
- res.status_code.should.equal(303)
+ assert res.status_code == 303
redirect = res.headers["location"]
assert redirect.startswith(redirect_base)
@@ -151,8 +148,8 @@ def test_s3_server_post_to_bucket_redirect():
assert args["bucket"][0] == "tester"
res = test_client.get(f"/{real_key}", "http://tester.localhost:5000/")
- res.status_code.should.equal(200)
- res.data.should.equal(filecontent.encode("utf8"))
+ assert res.status_code == 200
+ assert res.data == filecontent.encode("utf8")
def test_s3_server_post_without_content_length():
@@ -162,7 +159,7 @@ def test_s3_server_post_without_content_length():
res = test_client.put(
"/", "http://tester.localhost:5000/", environ_overrides={"CONTENT_LENGTH": ""}
)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
# You can specify a bucket in another region without specifying Content-Length
# (The body is just ignored..)
@@ -170,26 +167,30 @@ def test_s3_server_post_without_content_length():
"/",
"http://tester.localhost:5000/",
environ_overrides={"CONTENT_LENGTH": ""},
- data="<CreateBucketConfiguration><LocationConstraint>us-west-2</LocationConstraint></CreateBucketConfiguration>",
+ data=(
+ "<CreateBucketConfiguration>"
+ "<LocationConstraint>us-west-2</LocationConstraint>"
+ "</CreateBucketConfiguration>"
+ ),
)
- res.status_code.should.equal(200)
+ assert res.status_code == 200
# You cannot make any other bucket-related requests without specifying Content-Length
for path in ["/?versioning", "/?policy"]:
res = test_client.put(
path, "http://t.localhost:5000", environ_overrides={"CONTENT_LENGTH": ""}
)
- res.status_code.should.equal(411)
+ assert res.status_code == 411
# You cannot make any POST-request
res = test_client.post(
"/", "https://tester.localhost:5000/", environ_overrides={"CONTENT_LENGTH": ""}
)
- res.status_code.should.equal(411)
+ assert res.status_code == 411
def test_s3_server_post_unicode_bucket_key():
- # Make sure that we can deal with non-ascii characters in request URLs (e.g., S3 object names)
+ """Verify non-ascii characters in request URLs (e.g., S3 object names)."""
dispatcher = server.DomainDispatcherApplication(server.create_backend_app)
backend_app = dispatcher.get_application(
{"HTTP_HOST": "s3.amazonaws.com", "PATH_INFO": "/test-bucket/test-object-てすと"}
@@ -226,16 +227,12 @@ def test_s3_server_post_cors():
set(res.headers["Access-Control-Allow-Methods"].split(", ")) == expected_methods
)
- res.headers.should.have.key("Access-Control-Allow-Origin").which.should.equal(
- "https://localhost:9000"
- )
- res.headers.should.have.key("Access-Control-Allow-Headers").which.should.equal(
- "origin, x-requested-with"
- )
+ assert res.headers["Access-Control-Allow-Origin"] == "https://localhost:9000"
+ assert res.headers["Access-Control-Allow-Headers"] == "origin, x-requested-with"
def test_s3_server_post_cors_exposed_header():
- """Test that we can override default CORS headers with custom bucket rules"""
+ """Test overriding default CORS headers with custom bucket rules"""
# github.com/getmoto/moto/issues/4220
cors_config_payload = """
@@ -386,10 +383,12 @@ def test_s3_server_post_cors_multiple_origins():
assert b"AccessForbidden" in preflight_response.content
# Verify we can use a wildcard anywhere in the origin
- cors_config_payload = """<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><CORSRule>
- <AllowedOrigin>https://*.google.com</AllowedOrigin>
- <AllowedMethod>POST</AllowedMethod>
- </CORSRule></CORSConfiguration>"""
+ cors_config_payload = (
+ '<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><CORSRule>'
+ "<AllowedOrigin>https://*.google.com</AllowedOrigin>"
+ "<AllowedMethod>POST</AllowedMethod>"
+ "</CORSRule></CORSConfiguration>"
+ )
requests.put("http://testcors.localhost:6789/?cors", data=cors_config_payload)
for origin in ["https://sth.google.com", "https://a.google.com"]:
preflight_response = requests.options(
@@ -411,10 +410,12 @@ def test_s3_server_post_cors_multiple_origins():
assert b"AccessForbidden" in preflight_response.content
# Verify we can use a wildcard as the origin
- cors_config_payload = """<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><CORSRule>
- <AllowedOrigin>*</AllowedOrigin>
- <AllowedMethod>POST</AllowedMethod>
- </CORSRule></CORSConfiguration>"""
+ cors_config_payload = (
+ '<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><CORSRule>'
+ "<AllowedOrigin>*</AllowedOrigin>"
+ "<AllowedMethod>POST</AllowedMethod>"
+ "</CORSRule></CORSConfiguration>"
+ )
requests.put("http://testcors.localhost:6789/?cors", data=cors_config_payload)
for origin in ["https://a.google.com", "http://b.microsoft.com", "any"]:
preflight_response = requests.options(