S3: typing: FakeKey should always have an account_id (#7022)

Bert Blommers 2023-11-13 21:31:16 -01:00 committed by GitHub
parent 447710c6a6
commit c14df137d5
4 changed files with 14 additions and 6 deletions
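
In short, account_id is no longer Optional when constructing a FakeKey, so internal callers and tests that build keys directly must now pass it explicitly. A minimal sketch of the effect on callers (the 12-digit account id below is a placeholder, not taken from this commit):

    from moto.s3.models import FakeKey

    # Before this commit, account_id defaulted to None and could be omitted:
    #   key = FakeKey("name", b"data!")
    # After this commit it is a required parameter:
    key = FakeKey("name", b"data!", account_id="123456789012")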

View File

@@ -89,7 +89,7 @@ class FakeKey(BaseModel, ManagedState):
self,
name: str,
value: bytes,
- account_id: Optional[str] = None,
+ account_id: str,
storage: Optional[str] = "STANDARD",
etag: Optional[str] = None,
is_versioned: bool = False,
@@ -363,6 +363,7 @@ class FakeMultipart(BaseModel):
self,
key_name: str,
metadata: CaseInsensitiveDict, # type: ignore
+ account_id: str,
storage: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
acl: Optional["FakeAcl"] = None,
@@ -371,6 +372,7 @@ class FakeMultipart(BaseModel):
):
self.key_name = key_name
self.metadata = metadata
+ self.account_id = account_id
self.storage = storage
self.tags = tags
self.acl = acl
@@ -417,7 +419,7 @@ class FakeMultipart(BaseModel):
raise NoSuchUpload(upload_id=part_id)
key = FakeKey(
- part_id, value, encryption=self.sse_encryption, kms_key_id=self.kms_key_id # type: ignore
+ part_id, value, account_id=self.account_id, encryption=self.sse_encryption, kms_key_id=self.kms_key_id # type: ignore
)
if part_id in self.parts:
# We're overwriting the current part - dispose of it first
@@ -2289,6 +2291,7 @@ class S3Backend(BaseBackend, CloudWatchMetricProvider):
multipart = FakeMultipart(
key_name,
metadata,
+ account_id=self.account_id,
storage=storage_type,
tags=tags,
acl=acl,

View File

@@ -25,6 +25,7 @@ from moto import moto_proxy
from moto.s3.responses import DEFAULT_REGION_NAME
import moto.s3.models as s3model
+ from tests import DEFAULT_ACCOUNT_ID
from . import s3_aws_verified
@@ -44,12 +45,13 @@ class MyModel:
@mock_s3
def test_keys_are_pickleable():
"""Keys must be pickleable due to boto3 implementation details."""
- key = s3model.FakeKey("name", b"data!")
+ key = s3model.FakeKey("name", b"data!", account_id=DEFAULT_ACCOUNT_ID)
assert key.value == b"data!"
pickled = pickle.dumps(key)
loaded = pickle.loads(pickled)
assert loaded.value == key.value
+ assert loaded.account_id == key.account_id
@mock_s3

View File

@@ -324,7 +324,9 @@ def test_verify_key_can_be_copied_after_disposing():
#
# This test verifies the copy-operation succeeds, it will just not
# have any data.
- key = s3model.FakeKey(name="test", bucket_name="bucket", value="sth")
+ key = s3model.FakeKey(
+     name="test", bucket_name="bucket", value="sth", account_id=DEFAULT_ACCOUNT_ID
+ )
assert not key._value_buffer.closed
key.dispose()
assert key._value_buffer.closed

View File

@@ -17,6 +17,7 @@ from moto.settings import (
S3_UPLOAD_PART_MIN_SIZE,
test_proxy_mode,
)
+ from tests import DEFAULT_ACCOUNT_ID
from .test_s3 import add_proxy_details
if settings.TEST_DECORATOR_MODE:
@@ -51,12 +52,12 @@ def test_default_key_buffer_size():
os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "2" # 2 bytes
assert get_s3_default_key_buffer_size() == 2
fake_key = s3model.FakeKey("a", os.urandom(1)) # 1 byte string
fake_key = s3model.FakeKey("a", os.urandom(1), account_id=DEFAULT_ACCOUNT_ID)
assert fake_key._value_buffer._rolled is False
os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "1" # 1 byte
assert get_s3_default_key_buffer_size() == 1
fake_key = s3model.FakeKey("a", os.urandom(3)) # 3 byte string
fake_key = s3model.FakeKey("a", os.urandom(3), account_id=DEFAULT_ACCOUNT_ID)
assert fake_key._value_buffer._rolled is True
# if no MOTO_S3_DEFAULT_KEY_BUFFER_SIZE env variable is present the
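
For context on the _rolled assertions in this test: FakeKey appears to back its value with a spooled temporary file whose in-memory threshold is read from MOTO_S3_DEFAULT_KEY_BUFFER_SIZE (an assumption based on the _value_buffer attribute the test inspects, not stated in this diff). A self-contained sketch of the standard-library behaviour being exercised:

    import os
    import tempfile

    # 1 byte written against a 2-byte spool threshold: stays in memory
    buf = tempfile.SpooledTemporaryFile(max_size=2)
    buf.write(os.urandom(1))
    assert buf._rolled is False

    # 3 bytes written against a 1-byte spool threshold: rolls over to disk
    buf = tempfile.SpooledTemporaryFile(max_size=1)
    buf.write(os.urandom(3))
    assert buf._rolled is True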