Env variable for default key buffer size (#4001)

* introduce an environment variable for DEFAULT_KEY_BUFFER_SIZE
* prefix the environment variable with MOTO_S3 to avoid env variable conflicts
* apply black formatting
* fix formatting
benediktbrandt 2021-06-10 05:48:28 -04:00 committed by GitHub
parent c1b38be02d
commit 479ce861a7
3 changed files with 36 additions and 6 deletions
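
For context, the new MOTO_S3_DEFAULT_KEY_BUFFER_SIZE setting is read each time a FakeKey is created, so a caller can lower it to push large object bodies onto disk during tests. A minimal usage sketch, assuming a local boto3/moto setup; the bucket and key names are illustrative and not part of this commit:

import os

# Assumption for illustration: lower moto's in-memory buffer for S3 key values
# from the 16 MB default to 1 MB before any keys are created.
os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = str(1024 * 1024)

import boto3
from moto import mock_s3


@mock_s3
def upload_large_object():
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="example-bucket")
    # A 2 MB body exceeds the configured buffer size, so moto's FakeKey spools
    # the value to a temporary file on disk instead of holding it in memory.
    s3.put_object(
        Bucket="example-bucket",
        Key="big-object",
        Body=os.urandom(2 * 1024 * 1024),
    )


upload_large_object()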

moto/s3/models.py

@@ -45,6 +45,7 @@ from .exceptions import (
 )
 from .cloud_formation import cfn_to_api_encryption, is_replacement_update
 from .utils import clean_key_name, _VersionedKeyStore
+from ..settings import get_s3_default_key_buffer_size
 
 MAX_BUCKET_NAME_LENGTH = 63
 MIN_BUCKET_NAME_LENGTH = 3
@@ -59,7 +60,6 @@ STORAGE_CLASS = [
     "GLACIER",
     "DEEP_ARCHIVE",
 ]
-DEFAULT_KEY_BUFFER_SIZE = 16 * 1024 * 1024
 DEFAULT_TEXT_ENCODING = sys.getdefaultencoding()
 OWNER = "75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a"
@@ -96,7 +96,7 @@ class FakeKey(BaseModel):
         etag=None,
         is_versioned=False,
         version_id=0,
-        max_buffer_size=DEFAULT_KEY_BUFFER_SIZE,
+        max_buffer_size=None,
         multipart=None,
         bucket_name=None,
         encryption=None,
@@ -116,8 +116,10 @@ class FakeKey(BaseModel):
         self.multipart = multipart
         self.bucket_name = bucket_name
 
-        self._value_buffer = tempfile.SpooledTemporaryFile(max_size=max_buffer_size)
-        self._max_buffer_size = max_buffer_size
+        self._max_buffer_size = (
+            max_buffer_size if max_buffer_size else get_s3_default_key_buffer_size()
+        )
+        self._value_buffer = tempfile.SpooledTemporaryFile(self._max_buffer_size)
         self.value = value
         self.lock = threading.Lock()
@@ -207,7 +209,7 @@ class FakeKey(BaseModel):
         value_md5 = hashlib.md5()
         self._value_buffer.seek(0)
         while True:
-            block = self._value_buffer.read(DEFAULT_KEY_BUFFER_SIZE)
+            block = self._value_buffer.read(16 * 1024 * 1024)  # read in 16MB chunks
             if not block:
                 break
             value_md5.update(block)
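
The max_buffer_size handed to SpooledTemporaryFile above decides when a key's value rolls over from memory to an on-disk temporary file, which is what the new test below asserts via the private _rolled flag. A standalone standard-library sketch of that behavior, not part of this commit:

import tempfile

# SpooledTemporaryFile keeps data in memory until the total written size
# exceeds max_size, then rolls over to a real temporary file on disk.
buf = tempfile.SpooledTemporaryFile(max_size=4)

buf.write(b"abc")    # 3 bytes, still within max_size
print(buf._rolled)   # False: data lives in an in-memory buffer

buf.write(b"defgh")  # total of 8 bytes now exceeds max_size
print(buf._rolled)   # True: contents were moved to a temp file on disk

buf.close()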

moto/settings.py

@@ -21,3 +21,7 @@ def get_sf_execution_history_type():
     returned. Default value is SUCCESS, currently supports (SUCCESS || FAILURE)
     """
     return os.environ.get("SF_EXECUTION_HISTORY_TYPE", "SUCCESS")
+
+
+def get_s3_default_key_buffer_size():
+    return int(os.environ.get("MOTO_S3_DEFAULT_KEY_BUFFER_SIZE", 16 * 1024 * 1024))

tests/test_s3/test_s3.py

@@ -27,6 +27,8 @@ from boto.s3.key import Key
 from freezegun import freeze_time
 import six
 import requests
+from moto.s3 import models
 from moto.s3.responses import DEFAULT_REGION_NAME
 from unittest import SkipTest
 import pytest
@@ -37,7 +39,7 @@ from moto import settings, mock_s3, mock_s3_deprecated, mock_config
 import moto.s3.models as s3model
 from moto.core.exceptions import InvalidNextTokenException
 from moto.core.utils import py2_strip_unicode_keys
+from moto.settings import get_s3_default_key_buffer_size
 
 if settings.TEST_SERVER_MODE:
     REDUCED_PART_SIZE = s3model.UPLOAD_PART_MIN_SIZE
@@ -1125,6 +1127,28 @@ def test_multipart_upload_from_file_to_presigned_url():
     os.remove("text.txt")
 
 
+@mock_s3
+def test_default_key_buffer_size():
+    # save original DEFAULT_KEY_BUFFER_SIZE environment variable content
+    original_default_key_buffer_size = os.environ.get(
+        "MOTO_S3_DEFAULT_KEY_BUFFER_SIZE", None
+    )
+
+    os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "2"  # 2 bytes
+    assert get_s3_default_key_buffer_size() == 2
+    fk = models.FakeKey("a", os.urandom(1))  # 1 byte string
+    assert fk._value_buffer._rolled == False
+
+    os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = "1"  # 1 byte
+    assert get_s3_default_key_buffer_size() == 1
+    fk = models.FakeKey("a", os.urandom(3))  # 3 byte string
+    assert fk._value_buffer._rolled == True
+
+    # restore original environment variable content
+    if original_default_key_buffer_size:
+        os.environ["MOTO_S3_DEFAULT_KEY_BUFFER_SIZE"] = original_default_key_buffer_size
+
+
 @mock_s3
 def test_s3_object_in_private_bucket():
     s3 = boto3.resource("s3")